def test(self, x_test, y_test, data_start, batch_size=10, threshold=0.5, save_path=None):
    def mkdir(path):
        # Strip leading/trailing whitespace.
        path = path.strip()
        # Strip a trailing backslash.
        path = path.rstrip("\\")
        # Check whether the path already exists (True if it does, False otherwise).
        isExists = os.path.exists(path)
        if not isExists:
            # Create the directory if it does not exist.
            print("Building the file.")
            os.makedirs(path)
            return True
        else:
            # The directory already exists, so do not create it again.
            print("File is existing.")
            return False

    # if x_test.ndim == 3:
    #     x_test = np.expand_dims(x_test, axis=-1)
    # if y_test.ndim == 3:
    #     y_test = np.expand_dims(y_test, axis=-1)
    if save_path is None:
        save_path = self.name

    # with open(".\\result\\" + self.name + 'estimator_weights.json', 'r', encoding='utf-8') as f:
    #     output = json.load(f)
    # estimator_weights = output['estimator_weights']

    # Collect the saved weight files of all estimators in the ensemble.
    file_name = []
    for order in range(self.n_estimators):
        file_name.append(self.name + "_" + str(order) + ".h5")
        print(file_name[-1])

    y_predict = np.zeros(y_test.shape, dtype=np.float32)
    c = 0
    for model_file in file_name:
        # if estimator_weights[c] == 0:
        #     c = c + 1
        #     continue
        # c = c + 1
        print("Read model weight {}".format(".\\result\\model_record\\" + model_file))
        self.base_estimator.load_weights(".\\result\\model_record\\" + model_file)
        # Uniform soft vote: every estimator contributes 1 / n_estimators of its prediction.
        result = self.base_estimator.predict(x_test, batch_size=batch_size) / len(file_name)
        # result = self.estimator.predict(x_test, batch_size=batch_size) / np.sum(estimator_weights)
        print("Result = {}".format(np.sum(result)))
        y_predict = y_predict + result

    print("Check the threshold.")
    y_output = postprocessing.check_threshold(y_predict, size=self.output_size, threshold=threshold)

    print("Estimate.")
    iou = estimate.IOU(y_test, y_output, self.output_size[0], len(y_test))
    (precision, recall, F1) = estimate.F1_estimate(y_test, y_output, self.output_size[0], len(y_test))
    avr_iou = np.sum(iou) / len(y_test)
    avr_precision = np.sum(precision) / len(y_test)
    avr_recall = np.sum(recall) / len(y_test)
    avr_F1 = np.sum(F1) / len(y_test)
    print("Average IOU:{}".format(avr_iou))

    print('Save the result.')
    mkdir(".\\result\\image\\" + self.name)
    for index in range(len(y_test)):
        img_save = y_output[index] * 255
        cv2.imwrite(".\\result\\image\\" + self.name + '\\{}.png'.format(data_start + index), img_save)
        print('Save image:{}'.format(data_start + index))

    ex_iou = excel.Excel()
    ex_iou.write_loss_and_iou(save_path, 0, 0, iou, avr_iou)
    ex_iou.write_excel("e1", "precision", vertical=True)
    ex_iou.write_excel("e2", precision, vertical=True)
    ex_iou.write_excel("f1", "avr_precision", vertical=True)
    ex_iou.write_excel("f2", avr_precision, vertical=True)
    ex_iou.write_excel("g1", "recall", vertical=True)
    ex_iou.write_excel("g2", recall, vertical=True)
    ex_iou.write_excel("h1", "avr_recall", vertical=True)
    ex_iou.write_excel("h2", avr_recall, vertical=True)
    ex_iou.write_excel("i1", "F1", vertical=True)
    ex_iou.write_excel("i2", F1, vertical=True)
    ex_iou.write_excel("j1", "avr_F1", vertical=True)
    ex_iou.write_excel("j2", avr_F1, vertical=True)
    ex_iou.save_excel(file_name=".\\result\\data\\" + save_path + "_iou.xlsx")
    ex_iou.close_excel()
def test_weights(self, x_train, y_train, x_test, y_test, data_start, estimator_path, save_path, threshold=0.5, estimator_weights=None):
    def mkdir(path):
        # Strip leading/trailing whitespace.
        path = path.strip()
        # Strip a trailing backslash.
        path = path.rstrip("\\")
        # Check whether the path already exists (True if it does, False otherwise).
        isExists = os.path.exists(path)
        if not isExists:
            # Create the directory if it does not exist.
            print("Building the file.")
            os.makedirs(path)
            return True
        else:
            # The directory already exists, so do not create it again.
            print("File is existing.")
            return False

    base_estimator = self.base_estimator
    # if x_test.ndim == 3:
    #     x_test = np.expand_dims(x_test, axis=-1)
    # if y_test.ndim == 3:
    #     y_test = np.expand_dims(y_test, axis=-1)
    if estimator_weights is None:
        # Estimate the per-estimator weights from the training data.
        estimator_weights = self.update_weight(x_train, y_train, estimator_path, threshold=0.5)
    dict_estimators_weights = {}
    dict_estimators_weights['estimator_weights'] = estimator_weights
    # with open(".\\result\\" + self.name + '_estimator_weights.json', 'w', encoding='utf-8') as f:
    #     json.dump(dict_estimators_weights, f)

    y_predict = np.zeros(y_test.shape, dtype=np.float32)
    count = 0
    for model_file in estimator_path:
        print("Read model weight {}".format(".\\result\\model_record\\" + model_file + ".h5"))
        base_estimator.load_weights(".\\result\\model_record\\" + model_file + ".h5")
        # Weighted soft vote: each estimator contributes in proportion to its weight.
        result = base_estimator.predict(x_test, batch_size=self.batch_size) * estimator_weights[count]
        y_predict = y_predict + result
        count = count + 1

    print("Check the threshold.")
    y_output = postprocessing.check_threshold(y_predict, size=self.output_size, threshold=threshold)

    print("Estimate.")
    iou = estimate.IOU(y_test, y_output, self.output_size[0], len(y_test))
    (precision, recall, F1) = estimate.F1_estimate(y_test, y_output, self.output_size[0], len(y_test))
    avr_iou = np.sum(iou) / len(y_test)
    avr_precision = np.sum(precision) / len(y_test)
    avr_recall = np.sum(recall) / len(y_test)
    avr_F1 = np.sum(F1) / len(y_test)
    print("Average IOU:{}".format(avr_iou))

    print('Save the result.')
    mkdir(".\\result\\image\\" + save_path)
    for index in range(len(y_test)):
        img_save = y_output[index] * 255
        cv2.imwrite(".\\result\\image\\" + save_path + '\\{}.png'.format(data_start + index), img_save)
        print('Save image:{}'.format(data_start + index))

    ex_iou = excel.Excel()
    ex_iou.write_loss_and_iou(save_path, 0, 0, iou, avr_iou)
    ex_iou.write_excel("e1", "precision", vertical=True)
    ex_iou.write_excel("e2", precision, vertical=True)
    ex_iou.write_excel("f1", "avr_precision", vertical=True)
    ex_iou.write_excel("f2", avr_precision, vertical=True)
    ex_iou.write_excel("g1", "recall", vertical=True)
    ex_iou.write_excel("g2", recall, vertical=True)
    ex_iou.write_excel("h1", "avr_recall", vertical=True)
    ex_iou.write_excel("h2", avr_recall, vertical=True)
    ex_iou.write_excel("i1", "F1", vertical=True)
    ex_iou.write_excel("i2", F1, vertical=True)
    ex_iou.write_excel("j1", "avr_F1", vertical=True)
    ex_iou.write_excel("j2", avr_F1, vertical=True)
    ex_iou.save_excel(file_name=".\\result\\data\\" + save_path + "_AdaBWeightiou.xlsx")
    ex_iou.close_excel()
def test_IB(self, x_train, y_train, x_test, y_test, data_start, estimator_path, save_path, train_batch, threshold=0.5):
    def mkdir(path):
        # Strip leading/trailing whitespace.
        path = path.strip()
        # Strip a trailing backslash.
        path = path.rstrip("\\")
        # Check whether the path already exists (True if it does, False otherwise).
        isExists = os.path.exists(path)
        if not isExists:
            # Create the directory if it does not exist.
            print("Building the file.")
            os.makedirs(path)
            return True
        else:
            # The directory already exists, so do not create it again.
            print("File is existing.")
            return False

    base_estimator = self.base_estimator
    # if x_test.ndim == 3:
    #     x_test = np.expand_dims(x_test, axis=-1)
    # if y_test.ndim == 3:
    #     y_test = np.expand_dims(y_test, axis=-1)
    y_predict = np.zeros(y_test.shape, dtype=np.float32)
    estimator_weights = np.zeros(len(estimator_path))
    # estimator_weights = self.update_weight(x_train, y_train, estimator_path, threshold=0.5)

    print("Start IB training processing.")
    # Estimate the ensemble weights incrementally, one training batch at a time,
    # keeping a running average of the per-batch weight estimates.
    n_batches = int(np.ceil(len(x_train) / train_batch))
    for index in range(n_batches):
        print("Data for training {}".format(index + 1))
        # Slicing past the end of the array simply returns the final partial batch.
        x = x_train[index * train_batch:(index + 1) * train_batch]
        y = y_train[index * train_batch:(index + 1) * train_batch]
        print("x {}".format(x.shape))
        estimator_weights_update = self.update_weight(x, y, estimator_path, threshold=0.5)
        if index == 0:
            estimator_weights = estimator_weights_update
        else:
            # Running mean: previous average over `index` batches combined with the new estimate.
            estimator_weights = (index * estimator_weights + estimator_weights_update) / (index + 1)
    # Normalize so the weights sum to one.
    estimator_weights = estimator_weights / np.sum(estimator_weights)

    print("Start predict.")
    count = 0
    for model_file in estimator_path:
        print("Read model weight {}".format(".\\result\\model_record\\" + model_file + ".h5"))
        base_estimator.load_weights(".\\result\\model_record\\" + model_file + ".h5")
        # Weighted soft vote over all estimators.
        result = base_estimator.predict(x_test, batch_size=self.batch_size) * estimator_weights[count]
        y_predict = y_predict + result
        count = count + 1

    print("Check the threshold.")
    y_output = postprocessing.check_threshold(y_predict, size=self.output_size, threshold=threshold)

    print("Estimate.")
    iou = estimate.IOU(y_test, y_output, self.output_size[0], len(y_test))
    (precision, recall, F1) = estimate.F1_estimate(y_test, y_output, self.output_size[0], len(y_test))
    avr_iou = np.sum(iou) / len(y_test)
    avr_precision = np.sum(precision) / len(y_test)
    avr_recall = np.sum(recall) / len(y_test)
    avr_F1 = np.sum(F1) / len(y_test)
    print("Average IOU:{}".format(avr_iou))

    print('Save the result.')
    mkdir(".\\result\\image\\" + save_path)
    for index in range(len(y_test)):
        img_save = y_output[index] * 255
        cv2.imwrite(".\\result\\image\\" + save_path + '\\{}.png'.format(data_start + index), img_save)
        print('Save image:{}'.format(data_start + index))

    ex_iou = excel.Excel()
    ex_iou.write_loss_and_iou(save_path, 0, 0, iou, avr_iou)
    ex_iou.write_excel("e1", "precision", vertical=True)
    ex_iou.write_excel("e2", precision, vertical=True)
    ex_iou.write_excel("f1", "avr_precision", vertical=True)
    ex_iou.write_excel("f2", avr_precision, vertical=True)
    ex_iou.write_excel("g1", "recall", vertical=True)
    ex_iou.write_excel("g2", recall, vertical=True)
    ex_iou.write_excel("h1", "avr_recall", vertical=True)
    ex_iou.write_excel("h2", avr_recall, vertical=True)
    ex_iou.write_excel("i1", "F1", vertical=True)
    ex_iou.write_excel("i2", F1, vertical=True)
    ex_iou.write_excel("j1", "avr_F1", vertical=True)
    ex_iou.write_excel("j2", avr_F1, vertical=True)
    ex_iou.save_excel(file_name=".\\result\\data\\" + save_path + "_iou.xlsx")
    ex_iou.close_excel()
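# A minimal standalone sketch of the incremental weighting used in test_IB above,
# assuming update_weight() returns one non-negative score per estimator for a batch;
# the numbers below are made up purely for illustration:
#
#     batch_scores = [np.array([0.6, 0.3]), np.array([0.4, 0.5]), np.array([0.5, 0.4])]
#     weights = batch_scores[0]
#     for i, update in enumerate(batch_scores[1:], start=1):
#         weights = (i * weights + update) / (i + 1)      # running mean over batches
#     weights = weights / np.sum(weights)                 # normalize to sum to 1
#     # `weights` now holds the relative vote of each estimator at prediction time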
def test(self, x_test, y_test, data_start, estimator_path, save_path, batch_size=10, threshold=0.5):
    def mkdir(path):
        # Strip leading/trailing whitespace.
        path = path.strip()
        # Strip a trailing backslash.
        path = path.rstrip("\\")
        # Check whether the path already exists (True if it does, False otherwise).
        isExists = os.path.exists(path)
        if not isExists:
            # Create the directory if it does not exist.
            print("Building the file.")
            os.makedirs(path)
            return True
        else:
            # The directory already exists, so do not create it again.
            print("File is existing.")
            return False

    base_estimator = self.base_estimator
    # if x_test.ndim == 3:
    #     x_test = np.expand_dims(x_test, axis=-1)
    # if y_test.ndim == 3:
    #     y_test = np.expand_dims(y_test, axis=-1)

    # Single-estimator evaluation: load one set of saved weights and predict directly.
    base_estimator.load_weights(".\\result\\model_record\\" + estimator_path + ".h5")
    y_predict = base_estimator.predict(x_test, batch_size=batch_size)

    print("Check the threshold.\ny_test.shape = {}".format(y_test.shape))
    y_output = postprocessing.check_threshold(y_predict, size=self.output_size, threshold=threshold)

    print("Estimate.")
    iou = estimate.IOU(y_test, y_output, self.output_size[0], len(y_test))
    (precision, recall, F1) = estimate.F1_estimate(y_test, y_output, self.output_size[0], len(y_test))
    avr_iou = np.sum(iou) / len(y_test)
    avr_precision = np.sum(precision) / len(y_test)
    avr_recall = np.sum(recall) / len(y_test)
    avr_F1 = np.sum(F1) / len(y_test)
    print("Average IOU:{}".format(avr_iou))

    print('Save the result.')
    mkdir(".\\result\\image\\" + save_path)
    for index in range(len(y_test)):
        img_save = y_output[index] * 255
        cv2.imwrite(".\\result\\image\\" + save_path + '\\{}.png'.format(data_start + index), img_save)
        print('Save image:{}'.format(data_start + index))

    ex_iou = excel.Excel()
    ex_iou.write_loss_and_iou(save_path, 0, 0, iou, avr_iou)
    ex_iou.write_excel("e1", "precision", vertical=True)
    ex_iou.write_excel("e2", precision, vertical=True)
    ex_iou.write_excel("f1", "avr_precision", vertical=True)
    ex_iou.write_excel("f2", avr_precision, vertical=True)
    ex_iou.write_excel("g1", "recall", vertical=True)
    ex_iou.write_excel("g2", recall, vertical=True)
    ex_iou.write_excel("h1", "avr_recall", vertical=True)
    ex_iou.write_excel("h2", avr_recall, vertical=True)
    ex_iou.write_excel("i1", "F1", vertical=True)
    ex_iou.write_excel("i2", F1, vertical=True)
    ex_iou.write_excel("j1", "avr_F1", vertical=True)
    ex_iou.write_excel("j2", avr_F1, vertical=True)
    ex_iou.save_excel(file_name=".\\result\\data\\" + save_path + "_iou.xlsx")
    ex_iou.close_excel()
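# Usage sketch (hypothetical): the methods above are assumed to belong to ensemble
# wrapper classes defined elsewhere in this repository; `EnsembleModel` and the
# estimator names below are placeholders for illustration only. x_*/y_* are NumPy
# arrays whose spatial size matches self.output_size, with y_* holding binary masks.
#
#     model = EnsembleModel(...)                               # hypothetical constructor
#     # Weighted soft vote, with per-estimator weights estimated on (x_train, y_train):
#     model.test_weights(x_train, y_train, x_test, y_test, data_start=0,
#                        estimator_path=["model_0", "model_1"],  # ".h5" is appended
#                        save_path="weighted_run")
#     # Same idea, but weights estimated incrementally in batches of 50 training samples:
#     model.test_IB(x_train, y_train, x_test, y_test, data_start=0,
#                   estimator_path=["model_0", "model_1"],
#                   save_path="ib_run", train_batch=50)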