def __init__(self):
    self.original_root = "../../saved_original_for_generator/"
    self.data_pair1_root = "../../saved_pair1/"
    self.data_pair2_root = "../../saved_pair2/"
    self.data_mat_root = "../../saved_matrix/"
    self.data_mat_root_origin = "../../saved_matrix_unprocessed/"
    self.data_signal_root = "../../saved_stastics_for_generator/"
    # noise type
    self.noise_selector = [
        'gauss_noise', 'gauss_noise', 'gauss_noise', 'gauss_noise'
    ]
    self.save_matlab_root = "../../saved_matlab/"
    self.self_check_path_create(self.save_matlab_root)
    self.H = 1024
    self.W = 832
    self.matlab = Save_Signal_matlab()
    # read the signals, just use the existing path
    self.saved_stastics = MY_ANALYSIS()
    self.saved_stastics.all_statics_dir = os.path.join(
        self.data_signal_root, 'signals.pkl')
    self.shift_predictor = Shift_Predict()
    self.path_DS = self.saved_stastics.read_my_signal_results()
    self.path_DS.all_statics_dir = self.saved_stastics.all_statics_dir
    if visdom_show_flag == True:
        self.vis_ploter = VisdomLinePlotter()
def __init__(self, batch_size, image_size, path_size):
    self.data_pair1_root = "../dataset/For_pair_IMG_Train/pair1/"  # assume this one is the newest frame
    self.data_pair2_root = "../dataset/For_pair_IMG_Train/pair2/"  # assume this one is the historical image
    self.data_mat_root = "../dataset/For_pair_IMG_Train/CostMatrix/"
    self.signalroot = "../dataset/For_pair_IMG_Train/saved_stastics/"

    self.read_all_flag = 0
    self.read_record = 0
    self.folder_pointer = 0
    self.slice_record = 1

    self.batch_size = batch_size
    self.img_size = Resample_size
    self.path_size = Path_length
    self.mat_size = Mat_size
    self.img_size2 = Resample_size2

    # initialize the input buffers for training
    self.input_mat = np.zeros((batch_size, 1, Mat_size, Resample_size))    # matrix
    self.input_path = np.zeros((batch_size, Path_length))                  # path
    self.input_pair1 = np.zeros((batch_size, 1, Resample_size2, Resample_size))  # pairs
    self.input_pair2 = np.zeros((batch_size, 1, Resample_size2, Resample_size))
    self.input_pair3 = np.zeros((batch_size, 1, Resample_size2, Resample_size))
    self.input_pair4 = np.zeros((batch_size, 1, Resample_size2, Resample_size))

    # the number of folders is determined by the matrix directory
    self.all_dir_list = os.listdir(self.data_mat_root)
    self.folder_num = len(self.all_dir_list)

    # create the buffer lists (one entry per sub-folder)
    self.folder_mat_list = [None] * self.folder_num
    self.folder_pair1_list = [None] * self.folder_num
    self.folder_pair2_list = [None] * self.folder_num
    self.signal = [None] * self.folder_num

    # build every folder list and its data list
    number_i = 0  # all_dir_list is the sub-folder list
    # create the image lists that point to the statistics list
    saved_stastics = MY_ANALYSIS()
    # read all the folder lists of matrices, pairs and paths
    for subfold in self.all_dir_list:
        # the mat list
        this_folder_list = os.listdir(os.path.join(self.data_mat_root, subfold))
        this_folder_list2 = [
            self.data_mat_root + subfold + "/" + pointer
            for pointer in this_folder_list
        ]
        self.folder_mat_list[number_i] = this_folder_list2
        # the pair1 list
        this_folder_list = os.listdir(os.path.join(self.data_pair1_root, subfold))
        this_folder_list2 = [
            self.data_pair1_root + subfold + "/" + pointer
            for pointer in this_folder_list
        ]
        self.folder_pair1_list[number_i] = this_folder_list2
        # the pair2 list
        this_folder_list = os.listdir(os.path.join(self.data_pair2_root, subfold))
        this_folder_list2 = [
            self.data_pair2_root + subfold + "/" + pointer
            for pointer in this_folder_list
        ]
        self.folder_pair2_list[number_i] = this_folder_list2
        # the supervision signal list
        # change the dir first, then read
        saved_stastics.all_statics_dir = os.path.join(self.signalroot, subfold, 'signals.pkl')
        self.signal[number_i] = saved_stastics.read_my_signal_results()
        number_i += 1
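# Usage sketch (not part of the original loader): one way a single slot of the batch
# buffers above could be filled. The grayscale read, the /255 normalisation and the
# fill_one_slot helper are illustrative assumptions; Mat_size, Resample_size,
# Resample_size2 and Path_length are the same module globals the loader relies on,
# and loader.signal[...] is assumed to expose a 2-D path_saving array as elsewhere
# in this code base.
import cv2
import numpy as np

def fill_one_slot(loader, folder_idx, file_idx, slot):
    mat = cv2.imread(loader.folder_mat_list[folder_idx][file_idx], cv2.IMREAD_GRAYSCALE)
    p1 = cv2.imread(loader.folder_pair1_list[folder_idx][file_idx], cv2.IMREAD_GRAYSCALE)
    p2 = cv2.imread(loader.folder_pair2_list[folder_idx][file_idx], cv2.IMREAD_GRAYSCALE)

    # resize to the buffer shapes declared in __init__ (cv2.resize takes (W, H))
    mat = cv2.resize(mat, (Resample_size, Mat_size), interpolation=cv2.INTER_AREA)
    p1 = cv2.resize(p1, (Resample_size, Resample_size2), interpolation=cv2.INTER_AREA)
    p2 = cv2.resize(p2, (Resample_size, Resample_size2), interpolation=cv2.INTER_AREA)

    loader.input_mat[slot, 0, :, :] = mat / 255.0
    loader.input_pair1[slot, 0, :, :] = p1 / 255.0
    loader.input_pair2[slot, 0, :, :] = p2 / 255.0
    loader.input_path[slot, :] = loader.signal[folder_idx].path_saving[file_idx, :Path_length]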
def __init__(self):
    self.original_root = "../../saved_original_for_generator/"
    self.data_pair1_root = "../../saved_pair1/"
    self.data_pair2_root = "../../saved_pair2/"
    self.data_mat_root = "../../saved_matrix/"
    self.data_mat_root_origin = "../../saved_matrix_unprocessed/"
    self.data_signal_root = "../../saved_stastics_for_generator/"
    self.H = 1024
    self.W = 780
    # read the signals, just use the existing path
    self.saved_stastics = MY_ANALYSIS()
    self.saved_stastics.all_statics_dir = os.path.join(self.data_signal_root, 'signals.pkl')
    self.path_DS = self.saved_stastics.read_my_signal_results()
    self.path_DS.all_statics_dir = self.saved_stastics.all_statics_dir
def __init__(self, batch_size, image_size, path_size):
    self.dataroot = "..\\dataset\\CostMatrix\\"
    self.signalroot = "..\\dataset\\saved_stastics\\"

    self.read_all_flag = 0
    self.read_record = 0
    self.folder_pointer = 0

    self.batch_size = batch_size
    self.img_size = image_size
    self.path_size = path_size
    self.input_image = np.zeros((batch_size, 1, image_size, image_size))
    self.input_path = np.zeros((batch_size, path_size))

    self.all_dir_list = os.listdir(self.dataroot)
    self.folder_num = len(self.all_dir_list)
    # create the buffer list
    self.folder_list = [None] * self.folder_num
    self.signal = [None] * self.folder_num

    # build every folder list and its data list
    number_i = 0  # all_dir_list is the sub-folder list
    # create the image lists that point to the statistics list
    saved_stastics = MY_ANALYSIS()
    # read all the folder lists
    for subfold in self.all_dir_list:
        # if(number_i==0):
        this_folder_list = os.listdir(os.path.join(self.dataroot, subfold))
        this_folder_list2 = [
            self.dataroot + subfold + "\\" + pointer
            for pointer in this_folder_list
        ]
        self.folder_list[number_i] = this_folder_list2
        # change the dir first, then read
        saved_stastics.all_statics_dir = os.path.join(
            self.signalroot, subfold, 'signals.pkl')
        self.signal[number_i] = saved_stastics.read_my_signal_results()
        number_i += 1
def __init__(self):
    self.original_root = "../../saved_original_for_generator/"
    self.data_pair1_root = "../../saved_pair1/"
    self.data_pair2_root = "../../saved_pair2/"
    self.data_mat_root = "../../saved_matrix/"
    self.data_mat_root_origin = "../../saved_matrix_unprocessed/"
    self.data_mat_root_augmented = "../../saved_matrix_augmented/"
    self.data_signal_root = "../../saved_stastics_for_generator/"

    if not os.path.exists(self.data_mat_root_augmented):
        os.mkdir(self.data_mat_root_augmented)
        print("Directory ", self.data_mat_root_augmented, " Created ")
    else:
        print("Directory ", self.data_mat_root_augmented, " already exists")

    self.H = 1024
    self.W = 780
    # read the signals, just use the existing path
    self.saved_stastics = MY_ANALYSIS()
    self.saved_stastics.all_statics_dir = os.path.join(
        self.data_signal_root, 'signals.pkl')
    self.path_DS = self.saved_stastics.read_my_signal_results()
    self.path_DS.all_statics_dir = self.saved_stastics.all_statics_dir
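# Sketch: the existence check above (and self_check_path_create further down) can be
# collapsed into os.makedirs with exist_ok=True, which also creates missing parents.
# Shown only as an equivalent alternative, not as the form used in this project.
import os

def ensure_dir(directory):
    os.makedirs(directory, exist_ok=True)  # no error if the directory already exists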
# from path_finding import PATH
Display_STD_flag = False
Padd_zero_top = True
Display_signal_flag = False
Display_Matrix_flag = False
save_matlab_flag = True

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

video_sizeH = 450
video_sizeW = 900
# videoout = cv2.VideoWriter(save_display_dir+'0output.avi', -1, 20.0, (video_sizeW,video_sizeH))

if Display_signal_flag == True:
    from analy import MY_ANALYSIS
    # show the statistics results
    saved_stastics = MY_ANALYSIS()
    saved_stastics = saved_stastics.read_my_signal_results()
    saved_stastics.display()


def tranfer2circ_padding(img):
    H, W_ini = img.shape
    padding = np.zeros((Padding_H, W_ini))
    if Padd_zero_top == True:
        img = np.append(padding, img, axis=0)
    circular = tranfer_frome_rec2cir(img)
    return circular


def diplay_sequence():
Resample_size = Window_LEN
R_len = 20
# read_start = 100
read_start = 0
Debug_flag = True

global intergral_flag
intergral_flag = 0
Branch_flag = 0  # 0 fusion, 1 A, 2 B

if (Save_signal_flag == True):
    from analy import MY_ANALYSIS
    from analy import Save_signal_enum
    signal_saved = MY_ANALYSIS()


class VIDEO_PEOCESS:
    # ----------------------#
    # interpolate one image row by row
    def img_interpilate(image):
        h, w = image.shape
        for i in range(h):
            s = pd.Series(image[i, :])
            image[i, :] = s.interpolate()
        return image
    # ----------------------#
    # ----------------------#
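# Usage sketch for the row-wise interpolation above: pd.Series.interpolate fills NaN
# entries linearly along each row, which is presumably how columns left empty by the
# de-distortion warp get repaired. The tiny synthetic row below is made up for
# illustration only.
import numpy as np
import pandas as pd

row = np.array([10.0, np.nan, np.nan, 40.0, np.nan, 60.0])
filled = pd.Series(row).interpolate().to_numpy()
# filled -> [10., 20., 30., 40., 50., 60.]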
class DATA_Generator(object):
    def __init__(self):
        self.original_root = "../../saved_original_for_generator/"
        self.data_pair1_root = "../../saved_pair1/"
        self.data_pair2_root = "../../saved_pair2/"
        self.data_mat_root = "../../saved_matrix/"
        self.data_mat_root_origin = "../../saved_matrix_unprocessed/"
        self.data_signal_root = "../../saved_stastics_for_generator/"
        # noise type
        self.noise_selector = [
            'gauss_noise', 'gauss_noise', 'gauss_noise', 'gauss_noise'
        ]
        self.save_matlab_root = "../../saved_matlab/"
        self.self_check_path_create(self.save_matlab_root)
        self.H = 1024
        self.W = 832
        self.matlab = Save_Signal_matlab()
        # read the signals, just use the existing path
        self.saved_stastics = MY_ANALYSIS()
        self.saved_stastics.all_statics_dir = os.path.join(
            self.data_signal_root, 'signals.pkl')
        self.shift_predictor = Shift_Predict()
        self.path_DS = self.saved_stastics.read_my_signal_results()
        self.path_DS.all_statics_dir = self.saved_stastics.all_statics_dir
        if visdom_show_flag == True:
            self.vis_ploter = VisdomLinePlotter()

    # if the path does not exist, create it
    def self_check_path_create(self, directory):
        try:
            os.stat(directory)
        except:
            os.mkdir(directory)

    # read the original signal, then close its start with its end
    def close_the_origin_signal(self):
        read_id = 0  # read pointer initialization
        while (1):
            OriginalpathDirlist = os.listdir(self.original_root)
            # read the path and image number from the signal file
            # get the ID of the image which should be pointed to
            # get the path
            path = self.path_DS.path_saving[read_id, :]
            path_l = len(path)
            long_path = np.append(path, path, axis=0)
            long_path = np.append(long_path, path, axis=0)
            # long_path = np.append(path[::-1],path,axis=0)
            # long_path = np.append(long_path,path[::-1],axis=0)
            long_path = gaussian_filter1d(long_path, 5)  # the filter parameter is 5
            path_p = long_path[path_l:2 * path_l]
            # change the signal too
            self.path_DS.path_saving[read_id, :] = path_p
            self.path_DS.save()

            ## validation
            # steam[Len_steam-1,:,:] = original_IMG # un-correct
            # steam[Len_steam-2,:,:] = Shifted_IMG # correct
            # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
            ## Costmatrix = myfilter.gauss_filter_s (Costmatrix) # smooth matrix
            # show1 = Costmatrix
            # for i in range ( len(path)):
            #     show1[int(path[i]),i]=254
            # cv2.imwrite(self.data_mat_root + str(Image_ID) +".jpg", show1)
            print("[%s] is processed. test point time is [%f] " % (read_id, 0.1))
            read_id += 1
        pass

    def add_lines_to_matrix(self, matrix):
        value = 128
        H, W = matrix.shape
        line_positions = np.arange(0, W - 2 * H, H)
        for lines in line_positions:
            for i in np.arange(0, H):
                matrix[i, lines + i] = value
                matrix[i, lines + i + 1] = value
                matrix[i, lines + i + 3] = value
        return matrix

    def random_min_clip_by_row(self, min1, min2, mat):
        rand = np.random.random_sample()
        rand = rand * (min2 - min1) + min1
        H, W = mat.shape
        for i in np.arange(W):
            rand = np.random.random_sample()
            rand = rand * (min2 - min1) + min1
            mat[:, i] = np.clip(mat[:, i], rand, 254)
        return mat

    def noisy(self, noise_typ, image):
        if (noise_typ == "none"):
            return image
        if noise_typ == "gauss_noise":
            row, col = image.shape
            mean = 0
            var = 15
            sigma = var**0.5
            gauss = np.random.normal(mean, sigma, (row, col))
            gauss = gauss.reshape(row, col)
            noisy = image + gauss
            return np.clip(noisy, 0, 254)
        elif noise_typ == 's&p':
            row, col = image.shape
            s_vs_p = 0.5
            amount = 0.004
            out = np.copy(image)
            # Salt mode
            num_salt = np.ceil(amount * image.size * s_vs_p)
            coords = [
                np.random.randint(0, i - 1, int(num_salt))
                for i in image.shape
            ]
            out[tuple(coords)] = 1
            # Pepper mode
            num_pepper = np.ceil(amount * image.size * (1. - s_vs_p))
            coords = [
                np.random.randint(0, i - 1, int(num_pepper))
                for i in image.shape
            ]
            out[tuple(coords)] = 0
            return np.clip(out, 0, 254)
        elif noise_typ == 'poisson':
            vals = len(np.unique(image))
            vals = 2**np.ceil(np.log2(vals))
            noisy = np.random.poisson(image * vals) / float(vals)
            return np.clip(noisy, 0, 254)
        elif noise_typ == 'speckle':
            row, col = image.shape
            gauss = np.random.randn(row, col)
            gauss = gauss.reshape(row, col)
            noisy = image + image * gauss
            return np.clip(noisy, 0, 254)

    # validation function to check the matrix; can also be used to validate the correction result
    def validation_shift(self, original_IMG, Shifted_IMG, path, Image_ID):
        Costmatrix, shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(
            original_IMG, Shifted_IMG, 0)
        if Clip_matrix_flag == True:
            # Costmatrix = np.clip(Costmatrix, 20,254)
            Costmatrix = self.random_min_clip_by_row(5, 30, Costmatrix)

        Shifted_IMG2 = Shifted_IMG
        shift = self.shift_predictor.predict(original_IMG, Shifted_IMG, Shifted_IMG2)
        path_deep = shift + path * 0

        ## middle_point = PATH.calculate_ave_mid(mat)
        # path1,path_cost1=PATH.search_a_path(mat,start_point) # get the path and average cost of the path
        show1 = np.zeros((Costmatrix.shape[0], Costmatrix.shape[1], 3))
        cv2.imwrite(self.data_mat_root_origin + str(Image_ID) + ".jpg", show1)
        show1[:, :, 0] = Costmatrix
        show1[:, :, 1] = Costmatrix
        show1[:, :, 2] = Costmatrix
        for i in range(len(path)):
            painter = min(path[i], Window_LEN - 1)
            # painter2= min(path_tradition[i],Window_LEN-1)
            painter3 = min(path_deep[i], Window_LEN - 1)
            show1[int(painter), i, :] = [255, 255, 255]
            # show1[int(painter2),i,:]=[254,0,0]
            show1[int(painter3), i, :] = [0, 0, 254]
        # save the matrix to the file dir
        cv2.imwrite(self.data_mat_root + str(Image_ID) + ".jpg", show1)

    def validation(self, original_IMG, Shifted_IMG, path, Image_ID):
        # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
        Costmatrix, shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(
            original_IMG, Shifted_IMG, 0)
        # Costmatrix=cv2.blur(Costmatrix,(5,5))
        Costmatrix = myfilter.gauss_filter_s(Costmatrix)  # smooth matrix
        # Costmatrix = cv2.GaussianBlur(Costmatrix,(5,5),0)
        # down sample the matrix and up sample
        # Hm,Wm= Costmatrix.shape
        # Costmatrix = cv2.resize(Costmatrix, (int(Wm/2),int(Hm/2)), interpolation=cv2.INTER_LINEAR)
        # Costmatrix = cv2.resize(Costmatrix, (Wm,Hm), interpolation=cv2.INTER_LINEAR)
        if Clip_matrix_flag == True:
            Costmatrix = np.clip(Costmatrix, 20, 254)
            # Costmatrix=self.random_min_clip_by_row(5,30,Costmatrix)
            # Costmatrix = self.add_lines_to_matrix(Costmatrix)
            # Costmatrix=np.clip(Costmatrix, 20, 255)
            # Costmatrix = myfilter.gauss_filter_s(Costmatrix) # smooth matrix

        # traditional way to find the path
        ## middle_point = PATH.calculate_ave_mid(mat)
        # path1,path_cost1=PATH.search_a_path(mat,start_point) # get the path and average cost of the path
        show1 = np.zeros((Costmatrix.shape[0], Costmatrix.shape[1], 3))
        show1[:, :, 0] = Costmatrix
        show1[:, :, 1] = Costmatrix
        show1[:, :, 2] = Costmatrix
        for i in range(len(path)):
            painter = np.clip(path[i], 1, Window_LEN - 2)
            show1[int(painter), i, :] = show1[int(painter) - 1, i, :] = [254, 254, 254]

        if Show_nurd_compare == True:
            start_point = PATH.find_the_starting(
                Costmatrix)  # starting point for path searching
            path_tradition, pathcost1 = PATH.search_a_path(
                Costmatrix, start_point)  # get the path and average cost of the path
            # path_tradition=(path_tradition -Window_LEN/2)* Down_sample_F2 +Window_LEN/2
            # path_deep,path_cost2=PATH.search_a_path_Deep_Mat2longpath(Costmatrix) # get the path and average cost of the path
            path_deep, path_cost2 = PATH.search_a_path_GPU(
                Costmatrix)  # get the path and average cost of the path
            # path_deep=(path_deep -Window_LEN/2)* Down_sample_F2 +Window_LEN/2
            path_deep = gaussian_filter1d(path_deep, 3)  # smooth the path

            show1 = np.clip(show1, 1, 190)
            for i in range(len(path)):
                painter = np.clip(path[i], 1, Window_LEN - 2)
                show1[int(painter), i, :] = show1[int(painter) - 1, i, :] = [254, 254, 254]
                painter2 = np.clip(path_tradition[i], 1, Window_LEN - 2)
                painter3 = np.clip(path_deep[i], 1, Window_LEN - 2)
                show1[int(painter2), i, :] = show1[int(painter2) - 1, i, :] = [0, 254, 0]
                show1[int(painter3), i, :] = show1[int(painter3) - 1, i, :] = [0, 0, 254]

        # save the matrix to the file dir
        cv2.imwrite(self.data_mat_root_origin + str(Image_ID) + ".jpg", Costmatrix)
        cv2.imwrite(self.data_mat_root + str(Image_ID) + ".jpg", show1)

        # show the signal comparison in visdom
        if visdom_show_flag == True:
            x = np.arange(0, len(path))
            self.vis_ploter.plot_multi_arrays_append(x, path,
                                                     title_name=str(Image_ID),
                                                     legend='truth')
            self.vis_ploter.plot_multi_arrays_append(x, path_deep,
                                                     title_name=str(Image_ID),
                                                     legend='Deep Learning')
            self.vis_ploter.plot_multi_arrays_append(x, path_tradition,
                                                     title_name=str(Image_ID),
                                                     legend='Traditional')

        # save comparison signals to matlab
        if Save_matlab_flag == True:
            self.matlab.buffer_4(Image_ID, path, path_deep, path_tradition)
            self.matlab.save_mat()
        pass

    def generate_NURD(self):
        # read one frame from the original folder
        # randomly select one image from the originals
        read_id = 0
        Len_steam = 5
        steam = np.zeros((Len_steam, self.H, self.W))  # create video buffer
        num_path, path_len = self.path_DS.path_saving.shape
        for read_id in range(num_path):
            OriginalpathDirlist = os.listdir(self.original_root)
            sample = random.sample(OriginalpathDirlist, 1)
            Sample_path = self.original_root + sample[0]
            original_IMG = cv2.imread(Sample_path)

            path = self.path_DS.path_saving[read_id, :]
            self.W = len(path)

            original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
            original_IMG = cv2.resize(original_IMG, (self.W, self.H),
                                      interpolation=cv2.INTER_AREA)
            # read the path and image number from the signal file
            # get the ID of the image which should be pointed to
            Image_ID = int(
                self.path_DS.signals[Save_signal_enum.image_iD.value, read_id])
            # get the path
            # path = signal.resample(path, self.W) # resample the path
            if NURD_remove_shift_flag == True:
                path = path - (np.mean(path) - Window_LEN / 2)
            # path= path*0+ int(Window_LEN/2 )
            Dice = int(np.random.random_sample() * 100)
            if Dice % 2 == 0 or Use_random_NURD == True:
                path = path * 0 + int(Window_LEN / 2)
                fact1 = int(np.random.random_sample() * 20) + 20
                fact2 = np.random.random_sample()
                random_NURD = np.random.random_sample(
                    fact1) * 20 - 10 + fact2 * 20 - 10
                random_NURD = signal.resample(random_NURD, self.W)  # resample the path
                # random_NURD = np.random.random_sample(self.W)*30-10 + np.random.random_sample()*40-20
                random_NURD = gaussian_filter1d(random_NURD, 5)  # smooth the path
                path = path + random_NURD

            Low_path = int(35 / Down_sample_F2)
            # Low_path = Low_path.astype(int)
            Downsample_bias = 35 - Low_path * Down_sample_F2
            path = np.clip(path, 0, Window_LEN - 1)

            # create the shifted image
            Shifted_IMG = VIDEO_PEOCESS.de_distortion(original_IMG, path,
                                                      Image_ID, 0)
            # path = path -0.5* Downsample_bias
            path = gaussian_filter1d(path, 3)  # smooth the path
            path = np.clip(path, 0, Window_LEN - 1)
            self.path_DS.path_saving[read_id, :] = path
            self.path_DS.save()

            if add_noise_flag == True:
                # original_IMG = Basic_Operator.add_speckle_or_not(original_IMG)
                # Shifted_IMG = Basic_Operator.add_speckle_or_not(Shifted_IMG)
                noise_it = np.random.random_sample() * 100
                noise_type = str(self.noise_selector[int(noise_it) % 4])
                # noise_type = "gauss_noise"
                original_IMG = self.noisy(noise_type, original_IMG)

                noise_it = np.random.random_sample() * 100
                noise_type = str(self.noise_selector[int(noise_it) % 4])
                Shifted_IMG = self.noisy(noise_type, Shifted_IMG)
                # Shifted_IMG = self.noisy(noise_type,Shifted_IMG)

            # save all the results
            cv2.imwrite(self.data_pair1_root + str(Image_ID) + ".jpg", original_IMG)
            cv2.imwrite(self.data_pair2_root + str(Image_ID) + ".jpg", Shifted_IMG)

            ## validation
            # self.validation(original_IMG,Shifted_IMG,path,Image_ID)
            # steam[Len_steam-1,:,:] = original_IMG # un-correct
            # steam[Len_steam-2,:,:] = Shifted_IMG # correct
            # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
            # Costmatrix = myfilter.gauss_filter_s (Costmatrix) # smooth matrix
            # show1 = Costmatrix
            # for i in range ( len(path)):
            #     show1[int(path[i]),i]=254
            # cv2.imwrite(self.data_mat_root + str(Image_ID) +".jpg", show1)
            print("[%s] is processed. test point time is [%f] " % (read_id, 0.1))
            # read_id +=1

    def generate_overall_shifting(self):
        # read one frame from the original folder
        # randomly select one image from the originals
        read_id = 0
        Len_steam = 5
        # steam=np.zeros((Len_steam,self.H,self.W)) # create video buffer
        while (1):
            random_shifting = np.random.random_sample() * Overall_shiftting_WinLen
            # random_shifting = random.random() * Overall_shiftting_WinLen
            OriginalpathDirlist = os.listdir(self.original_root)
            sample = random.sample(OriginalpathDirlist, 1)
            Sample_path = self.original_root + sample[0]
            original_IMG = cv2.imread(Sample_path)
            original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
            original_IMG = cv2.resize(original_IMG, (self.W, self.H),
                                      interpolation=cv2.INTER_AREA)
            # original_IMG = cv2.resize(original_IMG, (self.W,self.H), interpolation=cv2.INTER_AREA)
            H, W = original_IMG.shape

            # read the path and image number from the signal file
            # get the ID of the image which should be pointed to
            Image_ID = int(
                self.path_DS.signals[Save_signal_enum.image_iD.value, read_id])
            # get the path
            path = self.path_DS.path_saving[read_id, :]
            # change the signal too
            self.path_DS.path_saving[read_id, :] = path * 0 + random_shifting
            path = signal.resample(path, W) * 0 + random_shifting  # resample the path
            # re-save the signal

            # create the shifted image
            Shifted_IMG = VIDEO_PEOCESS.de_distortion(original_IMG, path,
                                                      Image_ID, 0)
            # save all the results
            cv2.imwrite(self.data_pair1_root + str(Image_ID) + ".jpg", original_IMG)
            cv2.imwrite(self.data_pair2_root + str(Image_ID) + ".jpg", Shifted_IMG)
            self.path_DS.save()
            # self.validation_shift(original_IMG,Shifted_IMG,path,Image_ID)

            ## validation
            # steam[Len_steam-1,:,:] = original_IMG # un-correct
            # steam[Len_steam-2,:,:] = Shifted_IMG # correct
            # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
            ## Costmatrix = myfilter.gauss_filter_s (Costmatrix) # smooth matrix
            # show1 = Costmatrix
            # for i in range ( len(path)):
            #     show1[int(path[i]),i]=254
            # cv2.imwrite(self.data_mat_root + str(Image_ID) +".jpg", show1)
            print("[%s] is processed. test point time is [%f] " % (read_id, 0.1))
            read_id += 1

    # generate the OCT image with a combination of NURD and overall (group) shifting
    def generate_NURD_overall_shifting(self):
        # read one frame from the original folder
        # randomly select one image from the originals
        read_id = 0  # read pointer initialization
        Len_steam = 5  # create the buffer for validation
        steam = np.zeros((Len_steam, self.H, self.W))  # create video buffer
        growing = 0
        while (1):
            # list all the pictures for video generation; ensure the original folder has only one image
            OriginalpathDirlist = os.listdir(self.original_root)
            sample = random.sample(OriginalpathDirlist, 1)  # randomly choose a name from the folder list
            Sample_path = self.original_root + sample[0]  # build the reading path of this random picture
            original_IMG = cv2.imread(Sample_path)  # get this image
            original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)  # to gray
            original_IMG = cv2.resize(original_IMG, (self.W, self.H),
                                      interpolation=cv2.INTER_LINEAR)

            # read the path and image number from the signal file
            # get the ID of the image which should be pointed to
            Image_ID = int(
                self.path_DS.signals[Save_signal_enum.image_iD.value, read_id])
            # get the path
            path = self.path_DS.path_saving[read_id, :]
            path = signal.resample(path, self.W)  # resample the path
            if NURD_remove_shift_flag == True:
                path = path - (np.mean(path) - Window_LEN / 2)
            # path= path*0+ int(Window_LEN/2 )
            Dice = int(np.random.random_sample() * 100)
            if Dice % 2 == 0 or Use_random_NURD == True:
                path = path * 0 + int(Window_LEN / 2)
                fact1 = int(np.random.random_sample() * 50) + 20
                fact2 = np.random.random_sample()
                random_NURD = np.random.random_sample(fact1) * 30 - 15
                random_NURD = signal.resample(random_NURD, self.W)  # resample the path
                # random_NURD = np.random.random_sample(self.W)*30-10 + np.random.random_sample()*40-20
                random_NURD = gaussian_filter1d(random_NURD, 10)  # smooth the path
                path = path + random_NURD

            # exaggerate for display
            # path = (path -np.mean(path))*0.6+np.mean(path)
            # overall_shifting = Image_ID
            # overall_shifting = min(overall_shifting,self.W/2) # limit the shifting here, maybe half the length is sufficient for the combination
            random_shifting = np.random.random_sample() * Overall_shiftting_WinLen / 2

            # combine the overall shifting with the NURD
            # path = path + overall_shifting
            path = path + random_shifting

            # create the shifted image
            Shifted_IMG = VIDEO_PEOCESS.de_distortion(original_IMG, path,
                                                      Image_ID, 0)
            # modify the training ground truth
            self.path_DS.path_saving[read_id, :] = self.path_DS.path_saving[
                read_id, :] * 0 + random_shifting
            self.path_DS.save()

            # add noise to the image pair for validation
            if add_noise_flag == True:
                noise_type = str(self.noise_selector[int(Image_ID) % 4])
                # noise_type = "gauss_noise"
                original_IMG = self.noisy(noise_type, original_IMG)
                Shifted_IMG = self.noisy(noise_type, Shifted_IMG)

            if Image_ID < 100:
                grower = +np.random.random_sample() * np.random.random_sample() * 2
            else:
                grower = +np.random.random_sample() * np.random.random_sample() * 1
            growing += grower
            # additional shift
            Shifted_IMG = np.roll(Shifted_IMG, int(growing), axis=1)

            # save all the results
            cv2.imwrite(self.data_pair1_root + str(Image_ID) + ".jpg", original_IMG)
            cv2.imwrite(self.data_pair2_root + str(Image_ID) + ".jpg", Shifted_IMG)

            # save generation information (the NURD and shift used to build the pair) to matlab
            if Save_matlab_flag == True:
                self.matlab.buffer_overall_shift_NURD(Image_ID, random_shifting, path)
                self.matlab.save_mat_infor_of_over_allshift_with_NURD()
                self.matlab.save_pkl_infor_of_over_allshift_with_NURD()
                pass

            ## validation
            if validation_flag == True:
                self.validation_shift(original_IMG, Shifted_IMG, path, Image_ID)
                # self.validation(original_IMG,Shifted_IMG,path,Image_ID)
            # steam[Len_steam-1,:,:] = original_IMG # un-correct
            # steam[Len_steam-2,:,:] = Shifted_IMG # correct
            # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
            # Costmatrix = myfilter.gauss_filter_s (Costmatrix) # smooth matrix
            # show1 = Costmatrix
            # for i in range ( len(path)):
            #     show1[int(path[i]),i]=254
            # cv2.imwrite(self.data_mat_root + str(Image_ID) +".jpg", show1)
            print("[%s] is processed. test point time is [%f] " % (read_id, 0.1))
            read_id += 1
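# Sketch (illustrative, not part of the generator class): the random-NURD recipe used in
# generate_NURD / generate_NURD_overall_shifting reduced to its three steps -- draw a
# short random sequence, resample it to the image width, then smooth it. The default
# width, window length and amplitudes below are assumed example values, not the
# generator's own constants.
import numpy as np
from scipy import signal
from scipy.ndimage import gaussian_filter1d

def make_random_nurd(W=832, window_len=50, n_knots=30, amplitude=15.0, sigma=5):
    knots = (np.random.random_sample(n_knots) * 2.0 - 1.0) * amplitude  # coarse random distortion
    nurd = signal.resample(knots, W)           # stretch to one value per A-line
    nurd = gaussian_filter1d(nurd, sigma)      # smooth so the distortion stays low-frequency
    path = window_len / 2 + nurd               # centre inside the search window
    return np.clip(path, 0, window_len - 1)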
class DATA_Generator(object):
    def __init__(self):
        self.original_root = "../../saved_original_for_generator/"
        self.data_pair1_root = "../../saved_pair1/"
        self.data_pair2_root = "../../saved_pair2/"
        self.data_mat_root = "../../saved_matrix/"
        self.data_mat_root_origin = "../../saved_matrix_unprocessed/"
        self.data_signal_root = "../../saved_stastics_for_generator/"
        self.H = 1024
        self.W = 780
        # read the signals, just use the existing path
        self.saved_stastics = MY_ANALYSIS()
        self.saved_stastics.all_statics_dir = os.path.join(self.data_signal_root, 'signals.pkl')
        self.path_DS = self.saved_stastics.read_my_signal_results()
        self.path_DS.all_statics_dir = self.saved_stastics.all_statics_dir

    # validation function to check the matrix; can also be used to validate the correction result
    def validation(self, original_IMG, Shifted_IMG, path, Image_ID):
        # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
        Costmatrix, shift_used = COSTMtrix.matrix_cal_corre_block_version3_3GPU(original_IMG, Shifted_IMG, 0)
        # Costmatrix = myfilter.gauss_filter_s(Costmatrix) # smooth matrix

        # traditional way to find the path
        start_point = PATH.find_the_starting(Costmatrix)  # starting point for path searching
        # path_tradition,pathcost1 = PATH.search_a_path(Costmatrix,start_point) # get the path and average cost of the path
        # path_deep,path_cost2=PATH.search_a_path_Deep_Mat2longpath(Costmatrix) # get the path and average cost of the path
        path_deep, path_cost2 = PATH.search_a_path_deep_multiscal_small_window(Costmatrix)  # get the path and average cost of the path
        path_deep = gaussian_filter1d(path_deep, 3)  # smooth the path
        ## middle_point = PATH.calculate_ave_mid(mat)
        # path1,path_cost1=PATH.search_a_path(mat,start_point) # get the path and average cost of the path

        show1 = Costmatrix
        cv2.imwrite(self.data_mat_root_origin + str(Image_ID) + ".jpg", show1)
        for i in range(len(path)):
            painter = min(path[i], Window_LEN - 1)
            # painter2= min(path_tradition[i],Window_LEN-1)
            painter3 = min(path_deep[i], Window_LEN - 1)
            show1[int(painter), i] = 128
            # show1[int(painter2),i]=128
            show1[int(painter3), i] = 254
        cv2.imwrite(self.data_mat_root + str(Image_ID) + ".jpg", show1)

    def generate_NURD(self):
        # read one frame from the original folder
        # randomly select one image from the originals
        read_id = 0
        Len_steam = 5
        steam = np.zeros((Len_steam, self.H, self.W))  # create video buffer
        while (1):
            OriginalpathDirlist = os.listdir(self.original_root)
            sample = random.sample(OriginalpathDirlist, 1)
            Sample_path = self.original_root + sample[0]
            original_IMG = cv2.imread(Sample_path)
            original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
            original_IMG = cv2.resize(original_IMG, (self.W, self.H),
                                      interpolation=cv2.INTER_AREA)

            # read the path and image number from the signal file
            # get the ID of the image which should be pointed to
            Image_ID = int(
                self.path_DS.signals[Save_signal_enum.image_iD.value, read_id])
            # get the path
            path = self.path_DS.path_saving[read_id, :]
            path = signal.resample(path, self.W)  # resample the path

            # create the shifted image
            Shifted_IMG = VIDEO_PEOCESS.de_distortion(original_IMG, path, Image_ID, 0)

            # save all the results
            cv2.imwrite(self.data_pair1_root + str(Image_ID) + ".jpg", original_IMG)
            cv2.imwrite(self.data_pair2_root + str(Image_ID) + ".jpg", Shifted_IMG)

            ## validation
            self.validation(original_IMG, Shifted_IMG, path, Image_ID)
            # steam[Len_steam-1,:,:] = original_IMG # un-correct
            # steam[Len_steam-2,:,:] = Shifted_IMG # correct
            # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
            # Costmatrix = myfilter.gauss_filter_s (Costmatrix) # smooth matrix
            # show1 = Costmatrix
            # for i in range ( len(path)):
            #     show1[int(path[i]),i]=254
            # cv2.imwrite(self.data_mat_root + str(Image_ID) +".jpg", show1)
            print("[%s] is processed. test point time is [%f] " % (read_id, 0.1))
            read_id += 1

    def generate_overall_shifting(self):
        # read one frame from the original folder
        # randomly select one image from the originals
        read_id = 0
        Len_steam = 5
        # steam=np.zeros((Len_steam,self.H,self.W)) # create video buffer
        while (1):
            random_shifting = random.random() * Overall_shiftting_WinLen
            OriginalpathDirlist = os.listdir(self.original_root)
            sample = random.sample(OriginalpathDirlist, 1)
            Sample_path = self.original_root + sample[0]
            original_IMG = cv2.imread(Sample_path)
            original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
            # original_IMG = cv2.resize(original_IMG, (self.W,self.H), interpolation=cv2.INTER_AREA)
            H, W = original_IMG.shape

            # read the path and image number from the signal file
            # get the ID of the image which should be pointed to
            Image_ID = int(
                self.path_DS.signals[Save_signal_enum.image_iD.value, read_id])
            # get the path
            path = self.path_DS.path_saving[read_id, :]
            # change the signal too
            self.path_DS.path_saving[read_id, :] = path * 0 + random_shifting
            path = signal.resample(path, W) * 0 + random_shifting  # resample the path
            # re-save the signal

            # create the shifted image
            Shifted_IMG = VIDEO_PEOCESS.de_distortion(original_IMG, path, Image_ID, 0)

            # save all the results
            cv2.imwrite(self.data_pair1_root + str(Image_ID) + ".jpg", original_IMG)
            cv2.imwrite(self.data_pair2_root + str(Image_ID) + ".jpg", Shifted_IMG)
            self.path_DS.save()
            self.validation(original_IMG, Shifted_IMG, path, Image_ID)

            ## validation
            # steam[Len_steam-1,:,:] = original_IMG # un-correct
            # steam[Len_steam-2,:,:] = Shifted_IMG # correct
            # Costmatrix,shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(original_IMG,Shifted_IMG,0)
            ## Costmatrix = myfilter.gauss_filter_s (Costmatrix) # smooth matrix
            # show1 = Costmatrix
            # for i in range ( len(path)):
            #     show1[int(path[i]),i]=254
            # cv2.imwrite(self.data_mat_root + str(Image_ID) +".jpg", show1)
            print("[%s] is processed. test point time is [%f] " % (read_id, 0.1))
            read_id += 1

    def generate_NURD_overall_shifting(self):
        pass
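# Sketch (assumption, since VIDEO_PEOCESS.de_distortion is defined elsewhere): for the
# pure overall-shifting case above, where every A-line receives the same displacement,
# the warp is equivalent to a circular roll of the columns. Rounding to an integer
# column shift is an illustrative simplification.
import numpy as np

def apply_constant_shift(rect_img, shift_cols):
    # rect_img: H x W rectangular (polar-unwrapped) frame; positive shift rolls columns right
    return np.roll(rect_img, int(round(shift_cols)), axis=1)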
class DATA_augmentor(object):
    def __init__(self):
        self.original_root = "../../saved_original_for_generator/"
        self.data_pair1_root = "../../saved_pair1/"
        self.data_pair2_root = "../../saved_pair2/"
        self.data_mat_root = "../../saved_matrix/"
        self.data_mat_root_origin = "../../saved_matrix_unprocessed/"
        self.data_mat_root_augmented = "../../saved_matrix_augmented/"
        self.data_signal_root = "../../saved_stastics_for_generator/"

        if not os.path.exists(self.data_mat_root_augmented):
            os.mkdir(self.data_mat_root_augmented)
            print("Directory ", self.data_mat_root_augmented, " Created ")
        else:
            print("Directory ", self.data_mat_root_augmented, " already exists")

        self.H = 1024
        self.W = 780
        # read the signals, just use the existing path
        self.saved_stastics = MY_ANALYSIS()
        self.saved_stastics.all_statics_dir = os.path.join(
            self.data_signal_root, 'signals.pkl')
        self.path_DS = self.saved_stastics.read_my_signal_results()
        self.path_DS.all_statics_dir = self.saved_stastics.all_statics_dir

    # validation function to check the matrix; can also be used to validate the correction result
    def validation(self, original_IMG, Shifted_IMG, path, Image_ID):
        Costmatrix, shift_used = COSTMtrix.matrix_cal_corre_full_version3_2GPU(
            original_IMG, Shifted_IMG, 0)
        # Costmatrix = myfilter.gauss_filter_s(Costmatrix) # smooth matrix

        # traditional way to find the path
        start_point = PATH.find_the_starting(
            Costmatrix)  # starting point for path searching
        # path_tradition,pathcost1 = PATH.search_a_path(Costmatrix,start_point) # get the path and average cost of the path
        path_deep, path_cost2 = PATH.search_a_path_Deep_Mat2longpath(
            Costmatrix)  # get the path and average cost of the path
        path_deep = gaussian_filter1d(path_deep, 3)  # smooth the path
        ## middle_point = PATH.calculate_ave_mid(mat)
        # path1,path_cost1=PATH.search_a_path(mat,start_point) # get the path and average cost of the path

        show1 = Costmatrix
        cv2.imwrite(self.data_mat_root_origin + str(Image_ID) + ".jpg", show1)
        for i in range(len(path)):
            painter = min(path[i], Window_LEN - 1)
            # painter2= min(path_tradition[i],Window_LEN-1)
            painter3 = min(path_deep[i], Window_LEN - 1)
            show1[int(painter), i] = 128
            # show1[int(painter2),i]=128
            show1[int(painter3), i] = 254
        cv2.imwrite(self.data_mat_root + str(Image_ID) + ".jpg", show1)

    def noisy(self, noise_typ, image):
        if noise_typ == "gauss":
            row, col = image.shape
            mean = 0
            var = 50
            sigma = var**0.5
            gauss = np.random.normal(mean, sigma, (row, col))
            gauss = gauss.reshape(row, col)
            noisy = image + gauss
            return noisy
        elif noise_typ == "s&p":
            row, col = image.shape
            s_vs_p = 0.5
            amount = 0.004
            out = np.copy(image)
            # Salt mode
            num_salt = np.ceil(amount * image.size * s_vs_p)
            coords = [
                np.random.randint(0, i - 1, int(num_salt))
                for i in image.shape
            ]
            out[tuple(coords)] = 1
            # Pepper mode
            num_pepper = np.ceil(amount * image.size * (1. - s_vs_p))
            coords = [
                np.random.randint(0, i - 1, int(num_pepper))
                for i in image.shape
            ]
            out[tuple(coords)] = 0
            return out
        elif noise_typ == "poisson":
            vals = len(np.unique(image))
            vals = 2**np.ceil(np.log2(vals))
            noisy = np.random.poisson(image * vals) / float(vals)
            return noisy
        elif noise_typ == "speckle":
            row, col = image.shape
            gauss = np.random.randn(row, col)
            gauss = gauss.reshape(row, col)
            noisy = image + image * gauss
            return noisy

    # def add_gaussian_noise(self,X_imgs):
    #     gaussian_noise_imgs = []
    #     row, col, _ = X_imgs[0].shape
    #     # Gaussian distribution parameters
    #     mean = 0
    #     var = 0.1
    #     sigma = var ** 0.5
    #     for X_img in X_imgs:
    #         gaussian = np.random.random((row, col, 1)).astype(np.float32)
    #         gaussian = np.concatenate((gaussian, gaussian, gaussian), axis = 2)
    #         gaussian_img = cv2.addWeighted(X_img, 0.75, 0.25 * gaussian, 0.25, 0)
    #         gaussian_noise_imgs.append(gaussian_img)
    #     gaussian_noise_imgs = np.array(gaussian_noise_imgs, dtype = np.float32)
    #     return gaussian_noise_imgs

    def augment_gauss_noise(self):
        # read the originals one by one
        noise_selector = ["gauss", "s&p", "poisson", "speckle"]
        for img in os.listdir(self.data_mat_root_origin):
            a, b = os.path.splitext(img)
            if b == ".jpg":
                original_IMG = cv2.imread(self.data_mat_root_origin + img)
                original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
                Gauss_IMG = self.noisy(noise_selector[int(a) % 4], original_IMG)
                # save the result
                # cv2.imwrite(self.data_pair1_root + str(Image_ID) +".jpg", original_IMG)
                cv2.imwrite(self.data_mat_root_augmented + a + ".jpg", Gauss_IMG)
                print("[%s] is processed. test point time is [%f] " % (a, 0.1))

    def augment_blur(self):
        # read the originals one by one
        noise_selector = ["gauss", "s&p", "poisson", "speckle"]
        for img in os.listdir(self.data_mat_root_origin):
            a, b = os.path.splitext(img)
            if b == ".jpg":
                original_IMG = cv2.imread(self.data_mat_root_origin + img)
                original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
                Blur_IMG = cv2.blur(original_IMG, (5, 5))
                # save the result
                # cv2.imwrite(self.data_pair1_root + str(Image_ID) +".jpg", original_IMG)
                cv2.imwrite(self.data_mat_root_augmented + a + ".jpg", Blur_IMG)
                print("[%s] is processed. test point time is [%f] " % (a, 0.1))

    def add_lines_to_matrix(self, matrix):
        value = 128
        H, W = matrix.shape
        line_positions = np.arange(0, W - 2 * H, H)
        for lines in line_positions:
            for i in np.arange(0, H):
                matrix[i, lines + i] = value
        return matrix

    def augment_add_lines(self):
        # read the originals one by one
        for img in os.listdir(self.data_mat_root_origin):
            a, b = os.path.splitext(img)
            if b == ".jpg":
                original_IMG = cv2.imread(self.data_mat_root_origin + img)
                original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
                Add_line_IMG = self.add_lines_to_matrix(original_IMG)
                # save the result
                # cv2.imwrite(self.data_pair1_root + str(Image_ID) +".jpg", original_IMG)
                cv2.imwrite(self.data_mat_root_augmented + a + ".jpg", Add_line_IMG)
                print("[%s] is processed. test point time is [%f] " % (a, 0.1))

    def augment_clip(self):
        # read the originals one by one
        for img in os.listdir(self.data_mat_root_origin):
            a, b = os.path.splitext(img)
            if b == ".jpg":
                original_IMG = cv2.imread(self.data_mat_root_origin + img)
                original_IMG = cv2.cvtColor(original_IMG, cv2.COLOR_BGR2GRAY)
                clip_line_IMG = np.clip(original_IMG, 20, 255)
                # save the result
                # cv2.imwrite(self.data_pair1_root + str(Image_ID) +".jpg", original_IMG)
                cv2.imwrite(self.data_mat_root_augmented + a + ".jpg", clip_line_IMG)
                print("[%s] is processed. test point time is [%f] " % (a, 0.1))
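# Usage sketch (hypothetical driver, not present in the original file): the augmentor
# reads cost matrices from saved_matrix_unprocessed/ and writes one augmented copy per
# call into saved_matrix_augmented/ under the same file name, so running several
# augmentations in sequence overwrites the previous variant.
if __name__ == "__main__":
    augmentor = DATA_augmentor()
    augmentor.augment_gauss_noise()   # per-file noise type cycles gauss / s&p / poisson / speckle
    # augmentor.augment_blur()
    # augmentor.augment_add_lines()
    # augmentor.augment_clip()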