def ModelJSPH(weights_list=None, is_dismap=True, data_type='test', store_path=r''):
    model_root = r'/home/zhangyihong/Documents/ProstateECE/Model'
    data_root = r'/home/zhangyihong/Documents/ProstateECE/NPYNoDivide'
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    bc = BinaryClassification(store_folder=store_path, store_format='eps')

    if is_dismap:
        from SYECE.model import ResNeXt
        model_folder = model_root + '/ResNeXt_CBAM_CV_20200814'
    else:
        from SYECE.ModelWithoutDis import ResNeXt
        model_folder = model_root + '/ResNeXt_CBAM_CV_20200820'

    spliter = DataSpliter()
    sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(data_type))

    if data_type == 'test':
        data = DataManager(sub_list=sub_list)
        data.AddOne(Image2D(data_root + '/T2Slice/Test', shape=input_shape))
        data.AddOne(Image2D(data_root + '/AdcSlice/Test', shape=input_shape))
        data.AddOne(Image2D(data_root + '/DwiSlice/Test', shape=input_shape))
        if is_dismap:
            data.AddOne(Image2D(data_root + '/DistanceMap/Test', shape=input_shape, is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Negative'), is_input=False)
        else:
            data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
        data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)
    else:
        data = DataManager(sub_list=sub_list)
        data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
        data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
        data.AddOne(Image2D(data_root + '/DwiSlice', shape=input_shape))
        if is_dismap:
            data.AddOne(Image2D(data_root + '/DistanceMap', shape=input_shape, is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Negative'), is_input=False)
        else:
            data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
        data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

    cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
    cv_pred_list, cv_label_list = [], []
    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                     if one.is_file()]
            one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]
        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        pred_list, label_list = [], []
        model.eval()
        for inputs, outputs in data_loader:
            inputs = MoveTensorsToDevice(inputs, device)
            outputs = MoveTensorsToDevice(outputs, device)
            preds = model(*inputs)[:, 1]
            if is_dismap:
                pred_list.extend((1 - preds).cpu().data.numpy().squeeze().tolist())
                label_list.extend((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
            else:
                pred_list.extend(preds.cpu().data.numpy().squeeze().tolist())
                label_list.extend(outputs.cpu().data.numpy().astype(int).squeeze().tolist())

        # if cv_index == 1:
        #     for num, pred in enumerate(pred_list):
        #         print(num, pred)

        cv_pred_list.append(pred_list)
        cv_label_list.append(label_list)
        del model, weights_path

    cv_pred = np.array(cv_pred_list)
    cv_label = np.array(cv_label_list)
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0)
    bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())
    return mean_pred, mean_label
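# The cross-validation ensembling used by ModelJSPH (and by the other inference
# functions below) boils down to: stack the per-fold prediction lists and average
# over the fold axis. The helper below is only an illustrative sketch of that step;
# the name _AverageFoldPredictions is not part of the original pipeline, and it
# assumes every fold predicted the same cases in the same order.
def _AverageFoldPredictions(cv_pred_list, cv_label_list):
    import numpy as np
    cv_pred = np.array(cv_pred_list)    # shape: (n_folds, n_cases)
    cv_label = np.array(cv_label_list)  # labels are identical across folds
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0).astype(int)
    return mean_pred, mean_label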
def EnsembleTrain():
    torch.autograd.set_detect_anomaly(True)
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    total_epoch = 10000
    batch_size = 24
    model_folder = MakeFolder(model_root + '/ResNeXt_CBAM_CV_20201130_BinaryAttenMap')
    CopyFile('/home/zhangyihong/SSHProject/ProstateECE/SYECE/model.py', model_folder / 'model.py')

    param_config = {
        RotateTransform.name: {'theta': ['uniform', -10, 10]},
        ShiftTransform.name: {'horizontal_shift': ['uniform', -0.05, 0.05],
                              'vertical_shift': ['uniform', -0.05, 0.05]},
        ZoomTransform.name: {'horizontal_zoom': ['uniform', 0.95, 1.05],
                             'vertical_zoom': ['uniform', 0.95, 1.05]},
        FlipTransform.name: {'horizontal_flip': ['choice', True, False]},
        BiasTransform.name: {'center': ['uniform', -1., 1., 2],
                             'drop_ratio': ['uniform', 0., 1.]},
        NoiseTransform.name: {'noise_sigma': ['uniform', 0., 0.03]},
        ContrastTransform.name: {'factor': ['uniform', 0.8, 1.2]},
        GammaTransform.name: {'gamma': ['uniform', 0.8, 1.2]},
        ElasticTransform.name: ['elastic', 1, 0.1, 256]
    }

    spliter = DataSpliter()
    cv_generator = spliter.SplitLabelCV(data_root + '/ece.csv', store_root=model_folder)
    for cv_index, (sub_train, sub_val) in enumerate(cv_generator):
        sub_model_folder = MakeFolder(model_folder / 'CV_{}'.format(cv_index))
        train_loader, train_batches = _GetLoader(sub_train, param_config, input_shape, batch_size, True)
        val_loader, val_batches = _GetLoader(sub_val, param_config, input_shape, batch_size, True)

        model = ResNeXt(3, 2).to(device)
        model.apply(HeWeightInit)

        optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
        loss1 = torch.nn.NLLLoss()
        scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', patience=10,
                                                               factor=0.5, verbose=True)
        early_stopping = EarlyStopping(store_path=str(sub_model_folder / '{}-{:.6f}.pt'),
                                       patience=50, verbose=True)
        writer = SummaryWriter(log_dir=str(sub_model_folder / 'log'), comment='Net')

        for epoch in range(total_epoch):
            train_loss, val_loss = 0., 0.

            model.train()
            pred_list, label_list = [], []
            for ind, (inputs, outputs) in enumerate(train_loader):
                optimizer.zero_grad()
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)
                preds = model(*inputs)
                loss = loss1(preds, outputs.long())
                loss.backward()
                optimizer.step()

                train_loss += loss.item()
                pred_list.extend(preds[:, 1].cpu().data.numpy().tolist())
                label_list.extend(outputs.cpu().data.numpy().tolist())
            train_auc = roc_auc_score(label_list, pred_list)

            model.eval()
            pred_list, label_list = [], []
            with torch.no_grad():
                for ind, (inputs, outputs) in enumerate(val_loader):
                    inputs = MoveTensorsToDevice(inputs, device)
                    outputs = MoveTensorsToDevice(outputs, device)
                    preds = model(*inputs)
                    loss = loss1(preds, outputs.long())

                    val_loss += loss.item()
                    pred_list.extend(preds[:, 1].cpu().data.numpy().tolist())
                    label_list.extend(outputs.cpu().data.numpy().tolist())
            val_auc = roc_auc_score(label_list, pred_list)

            # Save Tensor Board
            for index, (name, param) in enumerate(model.named_parameters()):
                if 'bn' not in name:
                    writer.add_histogram(name + '_data', param.cpu().data.numpy(), epoch + 1)
            writer.add_scalars('Loss',
                               {'train_loss': train_loss / train_batches,
                                'val_loss': val_loss / val_batches}, epoch + 1)
            writer.add_scalars('Auc',
                               {'train_auc': train_auc, 'val_auc': val_auc}, epoch + 1)

            print('Epoch {}: loss: {:.3f}, val-loss: {:.3f}, auc: {:.3f}, val-auc: {:.3f}'.format(
                epoch + 1, train_loss / train_batches, val_loss / val_batches, train_auc, val_auc))

            scheduler.step(val_loss)
            early_stopping(val_loss, model, (epoch + 1, val_loss))
            if early_stopping.early_stop:
                print("Early stopping")
                break

            writer.flush()
        writer.close()

        del writer, optimizer, scheduler, early_stopping, model
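# EnsembleTrain pairs torch.nn.NLLLoss with the raw model output, which implies that
# the network's final layer already emits log-probabilities (e.g. via LogSoftmax);
# preds[:, 1] is then the log-probability of the positive class, which is monotonic
# in the probability and therefore fine for roc_auc_score. The sanity-check sketch
# below is illustrative only; _CheckLogProbOutput is not part of the original code.
def _CheckLogProbOutput(batch_logits):
    """Return True if exp(batch_logits) rows sum to ~1, i.e. the tensor holds log-probabilities."""
    import torch
    row_sums = torch.exp(batch_logits).sum(dim=1)
    return bool(torch.allclose(row_sums, torch.ones_like(row_sums), atol=1e-4))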
def ModelTest(data_folder, model_folder, case_name, weights_list=None):
    # label_df = pd.read_csv(r'/home/zhangyihong/Documents/ProstateECE/NPYNoDivide/ece.csv', index_col='case')
    label_df = pd.read_csv(r'/home/zhangyihong/Documents/ProstateECE/SUH_Dwi1500/label.csv', index_col='case')
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

    cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
    cv_pred_list, cv_label_list = [], []
    label_list, case_list = [], []
    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        # model = ModelwithDis(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                     if one.is_file()]
            one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]
        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        all_case_pred_list = []
        model.eval()
        for case in case_name:
            if case == 'DSR^dai shou rong_slice16':
                continue
            # case_all_slice_list = Run(os.path.join(data_folder, case[: case.index('_slice')]), case)
            case_all_slice_list = Run(os.path.join(data_folder, case[: case.index('_-_slice')]), case)
            all_slice_preds_list = []
            if cv_index == 0:
                # label_list.append((label_df.loc[case])['ece'])
                label_list.append((label_df.loc[case[: case.index('_-_slice')]])['label'])
                case_list.append(case[: case.index('_-_slice')])
            print('in cv {}, predict {}'.format(cv_index, case))

            # predict for each slice
            for case_one_slice_list in case_all_slice_list:
                distance_map = FindRegion(case_one_slice_list[3], case_one_slice_list[4]).astype(np.float32)  # attention map
                # distance_map = np.where(distance_map >= 0.1, 1, 0).astype(np.float32)  # binary attention map
                # distance_map = ExtractEdge(np.squeeze(case_one_slice_list[3]), kernel=np.ones((7, 7))).astype(np.float32)  # prostate boundary
                # distance_map = case_one_slice_list[4]  # pca roi
                t2, dwi, adc = case_one_slice_list[0], case_one_slice_list[1], case_one_slice_list[2]
                # feature = case_one_slice_list[5].astype(np.float32)

                # inputs_list = MoveTensorsToDevice([torch.tensor(t2[np.newaxis, np.newaxis, ...]),
                #                                    torch.tensor(dwi[np.newaxis, np.newaxis, ...]),
                #                                    torch.tensor(adc[np.newaxis, np.newaxis, ...]),
                #                                    torch.tensor(distance_map[np.newaxis, np.newaxis, ...]),
                #                                    torch.tensor(feature[np.newaxis, ...])],
                #                                   device)
                inputs_list = MoveTensorsToDevice([torch.tensor(t2[np.newaxis, np.newaxis, ...]),
                                                   torch.tensor(dwi[np.newaxis, np.newaxis, ...]),
                                                   torch.tensor(adc[np.newaxis, np.newaxis, ...]),
                                                   torch.tensor(distance_map[np.newaxis, np.newaxis, ...])],
                                                  device)

                # prediction for a slice
                preds = model(*inputs_list)[:, 1]
                # prediction_list for all slices of this case
                all_slice_preds_list.append(preds.cpu().data.numpy().squeeze().tolist())
            # prediction_list for all cases for a cv-train
            all_case_pred_list.append(all_slice_preds_list)
        # prediction_list for all cases for all cv-train
        cv_pred_list.append(all_case_pred_list)
        del model, weights_path

    mean_pred = []
    for index in range(len(cv_pred_list[0])):
        # the average prediction over cv-train folds for each case (slice-wise)
        mean_pred.append(np.mean(np.array([one_cv[index] for one_cv in cv_pred_list]), axis=0).tolist())
    return case_list, mean_pred, label_list
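# ModelTest above returns, per case, the fold-averaged prediction of every slice.
# If a single case-level score is needed (e.g. for a case-level ROC), one common
# choice is the maximum or mean over slices. This is a sketch under that assumption;
# _AggregateSlicePreds and the aggregation rule are illustrative, not taken from the
# original code.
def _AggregateSlicePreds(case_slice_preds, how='max'):
    import numpy as np
    arr = np.asarray(case_slice_preds, dtype=float)
    return float(arr.max()) if how == 'max' else float(arr.mean())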
def ROCofModels(weights_list=None, data_type=['alltrain', 'test']):
    from Metric.classification_statistics import get_auc, draw_roc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = model_root + '/ResNeXt_CBAM_CV_20200814'
    bc = BinaryClassification()

    fpr_list, tpr_list, auc_list = [], [], []
    for type in data_type:
        spliter = DataSpliter()
        # sub_list = spliter.LoadName(data_root / '{}-name.csv'.format(data_type), contain_label=True)
        sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(type))

        if type == 'test':
            data = DataManager(sub_list=sub_list)
            data.AddOne(Image2D(data_root + '/T2Slice/Test', shape=input_shape))
            data.AddOne(Image2D(data_root + '/AdcSlice/Test', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DwiSlice/Test', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DistanceMap/Test', shape=input_shape, is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Negative'), is_input=False)
            data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)
        else:
            data = DataManager(sub_list=sub_list)
            data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DwiSlice/', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DistanceMap', shape=input_shape, is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Negative'), is_input=False)
            data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

        cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
        cv_pred_list, cv_label_list = [], []
        for cv_index, cv_folder in enumerate(cv_folder_list):
            model = ResNeXt(3, 2).to(device)
            if weights_list is None:
                one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                         if one.is_file()]
                one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
                weights_path = one_fold_weights_list[-1]
            else:
                weights_path = weights_list[cv_index]
            print(weights_path.name)
            model.load_state_dict(torch.load(str(weights_path)))

            pred_list, label_list = [], []
            model.eval()
            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)
                preds = model(*inputs)[:, 1]
                pred_list.extend((1 - preds).cpu().data.numpy().squeeze().tolist())
                label_list.extend((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                # pred_list.extend((preds).cpu().data.numpy().squeeze().tolist())
                # label_list.extend((outputs).cpu().data.numpy().astype(int).squeeze().tolist())

            fpr, tpr, auc = get_auc(pred_list, label_list)
            fpr_list.append(fpr)
            tpr_list.append(tpr)
            auc_list.append(auc)

            cv_pred_list.append(pred_list)
            cv_label_list.append(label_list)
            del model, weights_path

        cv_pred = np.array(cv_pred_list)
        cv_label = np.array(cv_label_list)
        mean_pred = np.mean(cv_pred, axis=0)
        mean_label = np.mean(cv_label, axis=0)
        fpr, tpr, auc = get_auc(mean_pred, mean_label)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)

    # draw_roc(fpr_list, tpr_list, auc_list, name_list=['cv0', 'cv1', 'cv2', 'cv3', 'cv4', 'alltrain'])
    name_list = ['model1', 'model2', 'model3', 'model4', 'model5', 'model combined']
    for idx in range(len(fpr_list)):
        label = name_list[idx] + ': ' + '%.3f' % auc_list[idx]
        plt.plot(fpr_list[idx], tpr_list[idx], label=label)
    plt.plot([0, 1], [0, 1], '--', color='r')
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.legend()
    plt.show()
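# ROCofModels relies on Metric.classification_statistics.get_auc for the fpr/tpr/AUC
# triples it plots. For reference, an equivalent computation with scikit-learn looks
# like the sketch below; _RocWithSklearn is illustrative only and the project helper
# may differ in details such as the thresholds it returns or its argument order.
def _RocWithSklearn(pred_list, label_list):
    from sklearn.metrics import roc_curve, auc
    # roc_curve expects the true labels first, then the predicted scores.
    fpr, tpr, _ = roc_curve(label_list, pred_list)
    return fpr, tpr, auc(fpr, tpr)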
from CnnTools.T4T.Utility.Data import *
from BasicTool.MeDIT.Normalize import Normalize01
from BasicTool.MeDIT.Visualization import FusionImage, ShowColorByRoi
from SYECE.model import ResNeXt
# from SYECE.ModelWithoutDis import ResNeXt
from BasicTool.MeDIT.ArrayProcess import ExtractPatch
from ECEDataProcess.DataProcess.MaxRoi import GetRoiCenterBefore, GetRoiCenter
from torch.utils.data import DataLoader

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
# device = torch.device('cpu')

# model_root = r'/home/zhangyihong/Documents/ProstateECE/Model/ResNeXt_CBAM_CV_20200820/CV_0/31--5.778387.pt'
model_root = r'/home/zhangyihong/Documents/ProstateECE/Model/ResNeXt_CBAM_CV_20200814/CV_1/154--7.698224.pt'
data_root = r'/home/zhangyihong/Documents/ProstateECE/NPYNoDivide'
output_dir = r'/home/zhangyihong/Documents/ProstateECE/Model'

model = ResNeXt(3, 2).to(device)
model.load_state_dict(torch.load(model_root))
model.to(device)

input_shape = (192, 192)
sub_list = pd.read_csv(r'/home/zhangyihong/Documents/ProstateECE/NPYNoDivide/test-name.csv')
sub_list = sub_list.values.tolist()[0][1:]

data = DataManager(sub_list=sub_list)
data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
data.AddOne(Image2D(data_root + '/DwiSlice', shape=input_shape))
data.AddOne(Image2D(data_root + '/DistanceMap', shape=input_shape, is_roi=True))
data.AddOne(
def EnsembleInference(data_type, model_name, add_type, weights_list=None):
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = os.path.join(model_root, model_name)
    bc = BinaryClassification()

    spliter = DataSpliter()
    sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(data_type))

    if data_type == 'test':
        data = DataManager(sub_list=sub_list)
        data.AddOne(Image2D(data_root + '/T2Slice/Test', shape=input_shape))
        data.AddOne(Image2D(data_root + '/AdcSlice/Test', shape=input_shape))
        data.AddOne(Image2D(data_root + '/DwiSlice/Test', shape=input_shape))
        data.AddOne(Image2D(data_root + '/' + add_type + '/Test', shape=input_shape, is_roi=True))
        data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
        data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)
    else:
        data = DataManager(sub_list=sub_list)
        data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
        data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
        data.AddOne(Image2D(data_root + '/DwiSlice', shape=input_shape))
        data.AddOne(Image2D(data_root + '/' + add_type, shape=input_shape, is_roi=True))
        data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
        data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

    cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
    cv_pred_list, cv_label_list = [], []
    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                     if one.is_file()]
            one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]
        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        pred_list, label_list = [], []
        model.eval()
        for inputs, outputs in data_loader:
            inputs = MoveTensorsToDevice(inputs, device)
            outputs = MoveTensorsToDevice(outputs, device)
            preds = model(*inputs)[:, 1]
            pred_list.extend(preds.cpu().data.numpy().squeeze().tolist())
            label_list.extend(outputs.cpu().data.numpy().astype(int).squeeze().tolist())

        # bc.Run(pred_list, label_list)
        cv_pred_list.append(pred_list)
        cv_label_list.append(label_list)
        del model, weights_path

    cv_pred = np.array(cv_pred_list)
    cv_label = np.array(cv_label_list)
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0)

    np.save(os.path.join(r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/ChangeAtten',
                         add_type + data_type + '_label.npy'), mean_label)
    np.save(os.path.join(r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/ChangeAtten',
                         add_type + data_type + '_pred.npy'), mean_pred)
    print(mean_label)
    bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())
def ModelSUH(model_name, add_type, weights_list=None):
    from Metric.classification_statistics import get_auc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = os.path.join(model_root, model_name)
    bc = BinaryClassification()

    fpr_list, tpr_list, auc_list = [], [], []
    data = DataManager()
    data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
    data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
    data.AddOne(Image2D(data_root + '/DwiSlice', shape=input_shape))
    data.AddOne(Image2D(data_root + '/' + add_type, shape=input_shape, is_roi=True))
    data.AddOne(Label(data_root + '/label_negative.csv', label_tag='Positive'), is_input=False)
    data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

    cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
    cv_pred_list, cv_label_list = [], []
    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                     if one.is_file()]
            one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]
        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        pred_list, label_list = [], []
        model.eval()
        for inputs, outputs in data_loader:
            inputs = MoveTensorsToDevice(inputs, device)
            outputs = MoveTensorsToDevice(outputs, device)
            preds = model(*inputs)[:, 1]
            # A batch of size 1 squeezes to a scalar, so append instead of extend.
            if isinstance((1 - preds).cpu().data.numpy().squeeze().tolist(), float):
                pred_list.append(preds.cpu().data.numpy().squeeze().tolist())
                label_list.append(outputs.cpu().data.numpy().astype(int).squeeze().tolist())
            else:
                pred_list.extend(preds.cpu().data.numpy().squeeze().tolist())
                label_list.extend(outputs.cpu().data.numpy().astype(int).squeeze().tolist())

        cv_pred_list.append(pred_list)
        cv_label_list.append(label_list)
        del model, weights_path

    cv_pred = np.array(cv_pred_list)
    cv_label = np.array(cv_label_list)
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0)

    np.save(os.path.join(r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/ChangeAtten',
                         add_type + 'SUH_label.npy'), mean_label)
    np.save(os.path.join(r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/ChangeAtten',
                         add_type + 'SUH_pred.npy'), mean_pred)

    fpr, tpr, auc = get_auc(mean_pred, mean_label)
    fpr_list.append(fpr)
    tpr_list.append(tpr)
    auc_list.append(auc)

    result1 = bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())
    print(result1)
def ModelEnhancedSUH(weights_list=None, is_dismap=True):
    model_root = r'/home/zhangyihong/Documents/ProstateECE/Model'
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

    if is_dismap:
        from SYECE.model import ResNeXt
        model_folder = model_root + '/ResNeXt_CBAM_CV_20200814'
    else:
        from SYECE.ModelWithoutDis import ResNeXt
        model_folder = model_root + '/ResNeXt_CBAM_CV_20200820'

    bc = BinaryClassification()
    cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
    cv_pred_list, cv_label_list = [], []
    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                     if one.is_file()]
            one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]
        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        pred_list_enhanced, label_list_enhanced = [], []
        model.eval()
        for i in range(9):
            print(i)
            pred_list, label_list = [], []
            if i == 0:
                data_loader = EnhancedTestSUH(is_dismap, param_config)
            else:
                data_loader = EnhancedTestSUH(is_dismap)
            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)
                preds = model(*inputs)[:, 1]
                # A batch of size 1 squeezes to a scalar, so append instead of extend.
                if isinstance((1 - preds).cpu().data.numpy().squeeze().tolist(), float):
                    if is_dismap:
                        pred_list.append((1 - preds).cpu().data.numpy().squeeze().tolist())
                        label_list.append((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                    else:
                        pred_list.append(preds.cpu().data.numpy().squeeze().tolist())
                        label_list.append(outputs.cpu().data.numpy().astype(int).squeeze().tolist())
                else:
                    if is_dismap:
                        pred_list.extend((1 - preds).cpu().data.numpy().squeeze().tolist())
                        label_list.extend((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                    else:
                        pred_list.extend(preds.cpu().data.numpy().squeeze().tolist())
                        label_list.extend(outputs.cpu().data.numpy().astype(int).squeeze().tolist())
            pred_list_enhanced.append(pred_list)
            label_list_enhanced.append(label_list)

        cv_pred_list.append(np.mean(np.array(pred_list_enhanced), axis=0).tolist())
        cv_label_list.append(np.mean(np.array(label_list_enhanced), axis=0).tolist())
        fpr, tpr, auc = get_auc(np.mean(pred_list_enhanced, axis=0).tolist(),
                                np.mean(label_list_enhanced, axis=0).tolist())
        print('AUC: {}'.format(auc))
        del model, weights_path

    cv_pred = np.array(cv_pred_list)
    cv_label = np.array(cv_label_list)
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0)
    bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())
    return mean_pred, mean_label
def ModelTest(model_folder, data_root, weights_list=None, data_type=None):
    if data_type is None:
        data_type = ['alltrain', 'test']
    from Metric.classification_statistics import get_auc, draw_roc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    bc = BinaryClassification()

    fpr_list, tpr_list, auc_list = [], [], []
    for type in data_type:
        spliter = DataSpliter()
        sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(type))

        if type == 'test':
            data = DataManager(sub_list=sub_list)
            # data.AddOne(Image2D(data_root + '/T2Slice/Test', shape=input_shape))
            data.AddOne(Image2D(data_root + '/AdcSlice/Test', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DwiSlice/Test', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DistanceMap/Test', shape=input_shape, is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
            data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)
        else:
            data = DataManager(sub_list=sub_list)
            # data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DwiSlice/', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DistanceMap', shape=input_shape, is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
            data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

        cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
        cv_pred_list, cv_label_list = [], []
        for cv_index, cv_folder in enumerate(cv_folder_list):
            model = ResNeXt(2, 2).to(device)
            if weights_list is None:
                one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                                         if one.is_file()]
                one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
                weights_path = one_fold_weights_list[-1]
            else:
                weights_path = weights_list[cv_index]
            print(weights_path.name)
            model.load_state_dict(torch.load(str(weights_path)))

            pred_list, label_list = [], []
            model.eval()
            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)
                preds = model(*inputs)[:, 1]
                pred_list.extend(preds.cpu().data.numpy().squeeze().tolist())
                label_list.extend(outputs.cpu().data.numpy().astype(int).squeeze().tolist())

            cv_pred_list.append(pred_list)
            cv_label_list.append(label_list)
            del model, weights_path

        cv_pred = np.array(cv_pred_list)
        cv_label = np.array(cv_label_list)
        mean_pred = np.mean(cv_pred, axis=0)
        mean_label = np.mean(cv_label, axis=0)
        fpr, tpr, auc = get_auc(mean_pred, mean_label)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)
        print(type)
        bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())

    draw_roc(fpr_list, tpr_list, auc_list, name_list=['alltrain', 'test'])
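# Hypothetical entry point, not taken from the original script: the calls below are
# usage examples only. Uncomment the one you need, and make sure the global
# model_root / data_root point at folders (the visualization snippet above rebinds
# model_root to a single .pt file, which these functions do not expect).
if __name__ == '__main__':
    pass
    # EnsembleTrain()
    # ModelJSPH(weights_list=None, is_dismap=True, data_type='test', store_path=r'/tmp/jsph_result')
    # EnsembleInference('test', 'ResNeXt_CBAM_CV_20200814', 'DistanceMap')
    # ROCofModels(data_type=['alltrain', 'test'])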