Example #1
def Test():
    test_loader = LoadTestData(is_test=True)
    train_loader, validation_loader = LoadTVData(is_test=True)

    model = ResNet(Bottleneck, [3, 4, 6, 3]).to(device)
    model.load_state_dict(torch.load(model_path))

    fpr_list, tpr_list, auc_list = [], [], []

    name_list = ['Train', 'Validation', 'Test']
    loader_list = [train_loader, validation_loader, test_loader]

    model.eval()
    # with torch.no_grad():
    for name_num, loader in enumerate(loader_list):
        class_list, class_pred_list = [], []
        for i, (inputs, outputs) in enumerate(loader):
            t2, dwi, adc, roi, prostate = inputs[0], inputs[1], inputs[2], inputs[3], inputs[4]
            # torch.argmax keeps the one-hot ECE label as a tensor, so the
            # .type()/.to() calls below work (np.argmax would return a numpy array).
            ece = torch.argmax(outputs, dim=1)

            inputs = torch.cat([t2, dwi, adc, roi, prostate], dim=1)
            inputs = inputs.type(torch.FloatTensor).to(device)

            ece = ece.type(torch.LongTensor).to(device)

            class_out = model(inputs)
            class_out = torch.squeeze(class_out, dim=1)
            class_out_softmax = nn.functional.softmax(class_out, dim=1)

            class_list.extend(list(ece.cpu().numpy()))
            class_pred_list.extend(
                list(class_out_softmax.cpu().detach().numpy()[..., 1]))

        fpr, tpr, auc = get_auc(class_pred_list, class_list)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)
    draw_roc(fpr_list, tpr_list, auc_list, name_list)
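Note: every example in this collection relies on get_auc and draw_roc from Metric.classification_statistics, which is not reproduced here. Below is a minimal sketch of what such helpers might look like, assuming sklearn-style ROC computation; the real module may differ.

import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics

def get_auc(pred_list, label_list):
    # ROC curve and AUC from predicted probabilities and binary labels.
    fpr, tpr, _ = metrics.roc_curve(np.asarray(label_list), np.asarray(pred_list))
    return fpr, tpr, metrics.auc(fpr, tpr)

def draw_roc(fpr_list, tpr_list, auc_list, name_list):
    # One ROC curve per data set, with its AUC in the legend.
    for fpr, tpr, auc, name in zip(fpr_list, tpr_list, auc_list, name_list):
        plt.plot(fpr, tpr, label='{}: AUC = {:.3f}'.format(name, auc))
    plt.plot([0, 1], [0, 1], '--', color='r')
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.legend()
    plt.show()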
Example #2
def Test():
    test_loader = LoadTestData(data_folder)
    train_loader, validation_loader = LoadTVData(data_folder, is_test=True)

    model = ResNeXt(5, 1, [3, 4, 6, 3]).to(device)
    model.load_state_dict(torch.load(model_path))

    fpr_list, tpr_list, auc_list = [], [], []

    name_list = ['Train', 'Validation', 'Test']
    loader_list = [train_loader, validation_loader, test_loader]

    # with torch.no_grad():
    model.eval()
    for name_num, loader in enumerate(loader_list):
        class_list, class_pred_list = [], []
        for i, (inputs, outputs) in enumerate(loader):
            t2, dwi, adc, pca, prostate = inputs[0], inputs[1], inputs[
                2], inputs[3], inputs[4]
            ece = outputs.to(device)

            inputs = torch.cat([t2, dwi, adc, pca, prostate], dim=1)
            inputs = inputs.float().to(device)

            class_out = model(inputs)
            class_out_sigmoid = class_out.sigmoid()

            class_list.extend(list(ece.cpu().numpy()))
            class_pred_list.extend(
                list(class_out_sigmoid.cpu().detach().numpy()))

        fpr, tpr, auc = get_auc(class_pred_list, class_list)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)
    draw_roc(fpr_list, tpr_list, auc_list, name_list)
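Note: Examples #1, #2, and #7 leave "with torch.no_grad():" commented out, so autograd still tracks every forward pass during evaluation. A minimal sketch of the recommended pattern (predict_probs is a hypothetical helper, not part of the original code), assuming a loader that yields already-concatenated float inputs:

import torch

def predict_probs(model, loader, device):
    # Collect sigmoid probabilities without building the autograd graph.
    model.eval()
    probs = []
    with torch.no_grad():
        for inputs, _ in loader:
            inputs = inputs.float().to(device)
            probs.extend(model(inputs).sigmoid().cpu().numpy().ravel().tolist())
    return probs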
Example #3
def ModelTest(weights_list=None, data_type=None):
    if data_type is None:
        data_type = ['alltrain', 'test']
    from Metric.classification_statistics import get_auc, draw_roc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = model_root + '/PAGNet_MaxPred'
    bc = BinaryClassification()

    fpr_list, tpr_list, auc_list = [], [], []
    train_result, test_result = {}, {}
    for type in data_type:
        spliter = DataSpliter()
        data_folder = os.path.join(data_root, type)
        sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(type))

        data = DataManager(sub_list=sub_list)
        data.AddOne(Image2D(data_folder + '/T2Slice', shape=input_shape))
        data.AddOne(Image2D(data_folder + '/AdcSlice', shape=input_shape))
        data.AddOne(Image2D(data_folder + '/DwiSlice', shape=input_shape))
        data.AddOne(Image2D(data_folder + '/DistanceMap', shape=input_shape, is_roi=True))
        data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'), is_input=False)
        data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

        cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
        cv_pred_list, cv_label_list = [], []

        for cv_index, cv_folder in enumerate(cv_folder_list):
            model = ResNeXt(3, 2).to(device)
            if weights_list is None:
                one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0) if
                                         one.is_file()]
                one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
                weights_path = one_fold_weights_list[-1]
            else:
                weights_path = weights_list[cv_index]

            print(weights_path.name)
            model.load_state_dict(torch.load(str(weights_path)))

            pred_list, label_list = [], []
            model.eval()
            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)

                preds = model(*inputs)[:, 1]
                pred_list.extend((1 - preds).cpu().data.numpy().squeeze().tolist())
                label_list.extend((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                # pred_list.extend((preds).cpu().data.numpy().squeeze().tolist())
                # label_list.extend((outputs).cpu().data.numpy().astype(int).squeeze().tolist())

            cv_pred_list.append(pred_list)
            cv_label_list.append(label_list)

            del model, weights_path

        cv_pred = np.array(cv_pred_list)
        cv_label = np.array(cv_label_list)
        mean_pred = np.mean(cv_pred, axis=0)
        mean_label = np.mean(cv_label, axis=0)

        fpr, tpr, auc = get_auc(mean_pred, mean_label)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)

        if type == 'alltrain':
            train_result = bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist(), threshould=None)
            print(train_result)
        elif type == 'test':
            test_result = bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist(), threshould=train_result['Youden Index'])
            print(test_result)

    draw_roc(fpr_list, tpr_list, auc_list, name_list=['alltrain', 'test'])
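Note: MoveTensorsToDevice is imported from elsewhere in the repository and is not shown. A minimal sketch of a helper with the interface these examples use (a single tensor or a list of tensors in, the same structure on the target device out); the real implementation may differ:

import torch

def MoveTensorsToDevice(tensors, device):
    # Move a tensor, or each tensor in a list/tuple, onto the given device.
    if isinstance(tensors, (list, tuple)):
        return [one.to(device) for one in tensors]
    return tensors.to(device)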
Example #4
def ModelSUH(weights_list=None):
    from Metric.classification_statistics import get_auc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = model_root + '/ResNeXt_CBAM_CV_20200820'
    bc = BinaryClassification()

    fpr_list, tpr_list, auc_list = [], [], []

    data = DataManager()
    data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
    data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
    data.AddOne(Image2D(data_root + '/DwiSlice', shape=input_shape))
    # data.AddOne(Image2D(data_root + '/DistanceMap', shape=input_shape, is_roi=True))
    # data.AddOne(Label(data_root + '/label_negative.csv', label_tag='Negative'), is_input=False)
    data.AddOne(Label(data_root + '/label_negative.csv', label_tag='Positive'), is_input=False)
    data_loader = DataLoader(data, batch_size=batch_size, shuffle=False)

    cv_folder_list = [one for one in IterateCase(model_folder, only_folder=True, verbose=0)]
    cv_pred_list, cv_label_list = [], []
    cv_fcn_out_list = []
    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [one for one in IterateCase(cv_folder, only_folder=False, verbose=0) if
                                     one.is_file()]
            one_fold_weights_list = sorted(one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]

        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        pred_list, label_list = [], []
        fcn_out_list = []
        model.eval()
        for inputs, outputs in data_loader:
            inputs = MoveTensorsToDevice(inputs, device)
            outputs = MoveTensorsToDevice(outputs, device)

            # preds = model(*inputs)[:, 1]
            model_pred = model(*inputs)
            preds = model_pred[0][:, 1]

            # A single-sample batch squeezes to a bare float, so append instead of extend.
            if isinstance((1 - preds).cpu().data.numpy().squeeze().tolist(), float):
                # pred_list.append((1 - preds).cpu().data.numpy().squeeze().tolist())
                # label_list.append((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                pred_list.append(preds.cpu().data.numpy().squeeze().tolist())
                label_list.append(outputs.cpu().data.numpy().astype(int).squeeze().tolist())
                fcn_out_list.append(model_pred[1].cpu().data.numpy().squeeze().tolist())
            else:
                # pred_list.extend((1 - preds).cpu().data.numpy().squeeze().tolist())
                # label_list.extend((1 - outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                pred_list.extend(preds.cpu().data.numpy().squeeze().tolist())
                label_list.extend(outputs.cpu().data.numpy().astype(int).squeeze().tolist())
                fcn_out_list.extend(model_pred[1].cpu().data.numpy().squeeze().tolist())

        cv_pred_list.append(pred_list)
        cv_label_list.append(label_list)
        cv_fcn_out_list.append(fcn_out_list)

        del model, weights_path

    cv_pred = np.array(cv_pred_list)
    cv_label = np.array(cv_label_list)
    cv_fcn = np.array(cv_fcn_out_list)
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0)
    mean_fcn_out = np.mean(cv_fcn, axis=0)
    np.save(r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/FcnOut/aver_fcn_ResNeXt_suh.npy', mean_fcn_out)

    fpr, tpr, auc = get_auc(mean_pred, mean_label)
    fpr_list.append(fpr)
    tpr_list.append(tpr)
    auc_list.append(auc)

    result1 = bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())
    print(result1)
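Note: the isinstance(..., float) branch above exists because .squeeze().tolist() returns a bare float when a batch contains a single sample. A minimal alternative (to_list is a hypothetical helper, not from the original code) that avoids the branch by forcing a 1-D array:

import numpy as np

def to_list(tensor):
    # Always return a Python list, even for 0-d (single-sample) results.
    return np.atleast_1d(tensor.cpu().data.numpy().squeeze()).tolist()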
Example #5
def Train():
    train_loader, validation_loader = LoadTVData()
    model = ResNet(Bottleneck, [3, 4, 6, 3]).to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
    train_loss = 0.0
    valid_loss = 0.0
    cla_criterion = nn.BCEWithLogitsLoss()
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', patience=20, factor=0.5, verbose=True)
    early_stopping = EarlyStopping(patience=100, verbose=True)
    writer = SummaryWriter(log_dir=graph_path, comment='Net')

    for epoch in range(1000):
        train_loss_list, valid_loss_list = [], []
        class_list, class_pred_list = [], []

        model.train()
        for i, (inputs, outputs) in enumerate(train_loader):
            t2, dwi, adc, roi, prostate = inputs[0], inputs[1], inputs[2], inputs[3], inputs[4]
            ece = torch.squeeze(outputs, dim=1)

            inputs = torch.cat([t2, dwi, adc, roi, prostate], dim=1)
            inputs = inputs.type(torch.FloatTensor).to(device)

            ece = ece.type(torch.FloatTensor).to(device)

            optimizer.zero_grad()

            class_out, _ = model(inputs)
            class_out = torch.squeeze(class_out, dim=1)
            class_out_sigmoid = class_out.sigmoid()

            loss = cla_criterion(class_out, ece)

            loss.backward()
            optimizer.step()

            train_loss += loss.item()
            train_loss_list.append(loss.item())

            # compute auc
            class_list.extend(list(ece.cpu().numpy()))
            class_pred_list.extend(list(class_out_sigmoid.cpu().detach().numpy()))

            if (i + 1) % 10 == 0:
                print('Epoch [%d / %d], Iter [%d], Train Loss: %.4f' %(epoch + 1, 1000, i + 1, train_loss / 10))
                print(list(class_out_sigmoid.cpu().detach().numpy()))
                train_loss = 0.0

        _, _, train_auc = get_auc(class_pred_list, class_list)
        class_list, class_pred_list = [], []

        model.eval()
        with torch.no_grad():
            for i, (inputs, outputs) in enumerate(validation_loader):
                t2, dwi, adc, roi, prostate = inputs[0], inputs[1], inputs[2], inputs[3], inputs[4]
                ece = torch.squeeze(outputs, dim=1)

                inputs = torch.cat([t2, dwi, adc, roi, prostate], dim=1)
                inputs = inputs.type(torch.FloatTensor).to(device)

                ece = ece.type(torch.FloatTensor).to(device)

                class_out, _ = model(inputs)
                class_out = torch.squeeze(class_out, dim=1)
                class_out_sigmoid = class_out.sigmoid()

                loss = cla_criterion(class_out, ece)

                valid_loss += loss.item()
                valid_loss_list.append(loss.item())

                # compute auc
                class_list.extend(list(ece.cpu().numpy()))
                class_pred_list.extend(list(class_out_sigmoid.cpu().detach().numpy()))

                if (i + 1) % 10 == 0:
                    print('Epoch [%d / %d], Iter [%d],  Valid Loss: %.4f' %(epoch + 1, 1000, i + 1, valid_loss / 10))
                    print(list(class_out_sigmoid.cpu().detach().numpy()))
                    valid_loss = 0.0
            _, _, valid_auc = get_auc(class_pred_list, class_list)

        for index, (name, param) in enumerate(model.named_parameters()):
            if 'bn' not in name:
                writer.add_histogram(name + '_data', param.cpu().data.numpy(), epoch + 1)

        writer.add_scalars('Train_Val_Loss',
                           {'train_loss': np.mean(train_loss_list), 'val_loss': np.mean(valid_loss_list)}, epoch + 1)
        writer.add_scalars('Train_Val_auc',
                           {'train_auc': train_auc, 'val_auc': valid_auc}, epoch + 1)
        writer.close()

        print('Epoch:', epoch + 1, 'Training Loss:', np.mean(train_loss_list), 'Valid Loss:',
              np.mean(valid_loss_list), 'Train auc:', train_auc, 'Valid auc:', valid_auc)

        scheduler.step(np.mean(valid_loss_list))
        early_stopping(sum(valid_loss_list)/len(valid_loss_list), model, save_path=model_folder, evaluation=min)

        if early_stopping.early_stop:
            print("Early stopping")
            break
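Note: EarlyStopping is imported from elsewhere in the repository. A minimal sketch of a class with the interface Train() uses (patience, verbose, a call taking the monitored value, the model, save_path and evaluation, plus an early_stop flag); the checkpoint filename and the exact improvement rule are assumptions:

import os
import torch

class EarlyStopping:
    # Stop when the monitored value has not improved for `patience` epochs,
    # saving the best checkpoint along the way.
    def __init__(self, patience=100, verbose=True):
        self.patience = patience
        self.verbose = verbose
        self.best = None
        self.counter = 0
        self.early_stop = False

    def __call__(self, value, model, save_path, evaluation=min):
        if self.best is None or evaluation(value, self.best) == value:
            self.best = value
            self.counter = 0
            torch.save(model.state_dict(), os.path.join(save_path, 'checkpoint.pt'))
            if self.verbose:
                print('Monitored value improved to {:.6f}; checkpoint saved.'.format(value))
        else:
            self.counter += 1
            if self.counter >= self.patience:
                self.early_stop = True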
Example #6
def ROCofModels(weights_list=None, data_type=None):
    if data_type is None:
        data_type = ['alltrain', 'test']
    from Metric.classification_statistics import get_auc, draw_roc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = model_root + '/ResNeXt_CBAM_CV_20200814'
    bc = BinaryClassification()
    fpr_list, tpr_list, auc_list = [], [], []
    for type in data_type:
        spliter = DataSpliter()
        # sub_list = spliter.LoadName(data_root / '{}-name.csv'.format(data_type), contain_label=True)
        sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(type))

        if type == 'test':
            data = DataManager(sub_list=sub_list)
            data.AddOne(Image2D(data_root + '/T2Slice/Test',
                                shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/AdcSlice/Test', shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/DwiSlice/Test', shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/DistanceMap/Test',
                        shape=input_shape,
                        is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Negative'),
                        is_input=False)
            data_loader = DataLoader(data,
                                     batch_size=batch_size,
                                     shuffle=False)
        else:
            data = DataManager(sub_list=sub_list)
            data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DwiSlice/', shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/DistanceMap',
                        shape=input_shape,
                        is_roi=True))
            data.AddOne(Label(data_root + '/label.csv', label_tag='Negative'),
                        is_input=False)
            data_loader = DataLoader(data,
                                     batch_size=batch_size,
                                     shuffle=False)

        cv_folder_list = [
            one
            for one in IterateCase(model_folder, only_folder=True, verbose=0)
        ]
        cv_pred_list, cv_label_list = [], []

        for cv_index, cv_folder in enumerate(cv_folder_list):
            model = ResNeXt(3, 2).to(device)
            if weights_list is None:
                one_fold_weights_list = [
                    one for one in IterateCase(
                        cv_folder, only_folder=False, verbose=0)
                    if one.is_file()
                ]
                one_fold_weights_list = sorted(
                    one_fold_weights_list,
                    key=lambda x: os.path.getctime(str(x)))
                weights_path = one_fold_weights_list[-1]
            else:
                weights_path = weights_list[cv_index]

            print(weights_path.name)
            model.load_state_dict(torch.load(str(weights_path)))

            pred_list, label_list = [], []
            model.eval()
            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)

                preds = model(*inputs)[:, 1]
                pred_list.extend(
                    (1 - preds).cpu().data.numpy().squeeze().tolist())
                label_list.extend((
                    1 -
                    outputs).cpu().data.numpy().astype(int).squeeze().tolist())
                # pred_list.extend((preds).cpu().data.numpy().squeeze().tolist())
                # label_list.extend((outputs).cpu().data.numpy().astype(int).squeeze().tolist())

            fpr, tpr, auc = get_auc(pred_list, label_list)
            fpr_list.append(fpr)
            tpr_list.append(tpr)
            auc_list.append(auc)

            cv_pred_list.append(pred_list)
            cv_label_list.append(label_list)

            del model, weights_path

        cv_pred = np.array(cv_pred_list)
        cv_label = np.array(cv_label_list)
        mean_pred = np.mean(cv_pred, axis=0)
        mean_label = np.mean(cv_label, axis=0)

        fpr, tpr, auc = get_auc(mean_pred, mean_label)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)

    # draw_roc(fpr_list, tpr_list, auc_list, name_list=['cv0', 'cv1', 'cv2', 'cv3', 'cv4', 'alltrian'])
    name_list = [
        'model1', 'model2', 'model3', 'model4', 'model5', 'model combined'
    ]
    for idx in range(len(fpr_list)):
        label = name_list[idx] + ': ' + '%.3f' % auc_list[idx]
        plt.plot(fpr_list[idx], tpr_list[idx], label=label)

    plt.plot([0, 1], [0, 1], '--', color='r')
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.legend()
    plt.show()
Example #7
def Test():
    from Metric.Dice import Dice
    import matplotlib.pyplot as plt
    dice = Dice()
    test_loader = LoadTestData()
    train_loader, validation_loader = LoadTVData(is_test=True)

    model = MultiTaskModel(in_channels=3, out_channels=1).to(device)
    model.load_state_dict(torch.load(model_path))

    fpr_list, tpr_list, auc_list = [], [], []

    name_list = ['Train', 'Validation', 'Test']
    loader_list = [train_loader, validation_loader, test_loader]

    # with torch.no_grad():
    model.eval()
    for name_num, loader in enumerate(loader_list):
        class_list, class_pred_list = [], []
        prostate_list, prostate_pred_list = [], []
        roi_list, roi_pred_list = [], []
        prostate_dice, roi_dice = [], []
        for i, (inputs, outputs) in enumerate(loader):
            t2, dwi, adc = inputs[0], inputs[1], inputs[2]
            ece = torch.squeeze(outputs[0], dim=1)
            roi, prostate = outputs[1].to(device), outputs[2].to(device)

            inputs = torch.cat([t2, dwi, adc], dim=1)
            inputs = inputs.type(torch.FloatTensor).to(device)

            # One-hot ECE label -> class index, kept as a torch tensor.
            ece = torch.argmax(ece, dim=1)
            ece = ece.type(torch.LongTensor).to(device)

            roi_out, prostate_out, class_out, _ = model(inputs)
            class_out_softmax = nn.functional.softmax(class_out, dim=1)

            class_list.extend(list(ece.cpu().numpy()))
            class_pred_list.extend(
                list(class_out_softmax.cpu().detach().numpy()[..., 1]))

            prostate_out = BinaryPred(prostate_out).cpu().detach()
            prostate = prostate.cpu()
            prostate_pred_list.extend(list(prostate_out))
            prostate_list.extend(list(prostate))

            roi_out = BinaryPred(roi_out).cpu().detach()
            roi = roi.cpu()
            roi_pred_list.extend(list(roi_out))
            roi_list.extend(list(roi))

        for idx in range(len(roi_list)):
            roi_dice.append(dice(roi_list[idx], roi_pred_list[idx]).numpy())
            prostate_dice.append(
                dice(prostate_list[idx], prostate_pred_list[idx]).numpy())
        print('average dice of roi:', sum(roi_dice) / len(roi_dice))
        print('average dice of prostate:',
              sum(prostate_dice) / len(prostate_dice))
        plt.hist(roi_dice)
        plt.title('Dice of Prostate Cancer in ' + name_list[name_num])
        plt.show()

        plt.hist(prostate_dice)
        plt.title('Dice of Prostate in ' + name_list[name_num])
        plt.show()

        fpr, tpr, auc = get_auc(class_pred_list, class_list)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)
    draw_roc(fpr_list, tpr_list, auc_list, name_list)
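Note: the Dice metric imported from Metric.Dice is not shown. A minimal sketch of a callable with the interface used above (two binary masks in, a scalar tensor out); the real implementation may differ:

import torch

class Dice:
    def __init__(self, smooth=1e-5):
        self.smooth = smooth

    def __call__(self, target, pred):
        # Dice = 2 * intersection / (|target| + |pred|), smoothed for empty masks.
        target = target.float().flatten()
        pred = pred.float().flatten()
        intersection = (target * pred).sum()
        return (2.0 * intersection + self.smooth) / (target.sum() + pred.sum() + self.smooth)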
Example #8
def Train():
    ClearGraphPath()
    train_loader, validation_loader = LoadTVData()
    model = MultiTaskModel(in_channels=3, out_channels=1).to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
    train_loss1 = 0.0
    train_loss2 = 0.0
    train_loss3 = 0.0
    train_loss = 0.0
    valid_loss1 = 0.0
    valid_loss2 = 0.0
    valid_loss3 = 0.0
    valid_loss = 0.0
    seg_criterion1 = DiceLoss()
    seg_criterion2 = DiceLoss()
    cla_criterion = nn.CrossEntropyLoss()
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                           mode='min',
                                                           patience=20,
                                                           factor=0.5,
                                                           verbose=True)
    early_stopping = EarlyStopping(patience=100, verbose=True)
    writer = SummaryWriter(log_dir=graph_path, comment='Net')

    for epoch in range(1000):
        train_loss1_list, valid_loss1_list = [], []
        train_loss2_list, valid_loss2_list = [], []
        train_loss3_list, valid_loss3_list = [], []
        train_loss_list, valid_loss_list = [], []
        class_list, class_pred_list = [], []

        model.train()
        for i, (inputs, outputs) in enumerate(train_loader):
            t2, dwi, adc = inputs[0], inputs[1], inputs[2]
            ece = torch.squeeze(outputs[0], dim=1)
            roi, prostate = outputs[1].to(device), outputs[2].to(device)

            inputs = torch.cat([t2, dwi, adc], dim=1)
            inputs = inputs.type(torch.FloatTensor).to(device)

            # One-hot ECE label -> class index, kept as a torch tensor.
            ece = torch.argmax(ece, dim=1)
            ece = ece.type(torch.LongTensor).to(device)

            optimizer.zero_grad()

            roi_out, prostate_out, class_out, _ = model(inputs)
            class_out_softmax = nn.functional.softmax(class_out, dim=1)

            loss1 = seg_criterion1(roi_out, roi)
            loss2 = seg_criterion2(prostate_out, prostate)
            loss3 = cla_criterion(class_out, ece)
            loss = loss1 + loss2 + loss3

            loss.backward()
            optimizer.step()

            train_loss1 += loss1.item()
            train_loss1_list.append(loss1.item())

            train_loss2 += loss2.item()
            train_loss2_list.append(loss2.item())

            train_loss3 += loss3.item()
            train_loss3_list.append(loss3.item())

            train_loss += loss.item()
            train_loss_list.append(loss.item())

            # compute auc
            class_list.extend(list(ece.cpu().numpy()))
            class_pred_list.extend(
                list(class_out_softmax.cpu().detach().numpy()[..., 1]))

            if (i + 1) % 10 == 0:
                print(
                    'Epoch [%d / %d], Iter [%d], Cancer train Loss: %.4f, Prostate train Loss: %.4f, ECE train Loss: %.4f, Loss: %.4f'
                    % (epoch + 1, 1000, i + 1, train_loss1 / 10,
                       train_loss2 / 10, train_loss3 / 10, train_loss / 10))
                train_loss = 0.0
                train_loss1 = 0.0
                train_loss2 = 0.0
                train_loss3 = 0.0

        _, _, train_auc = get_auc(class_pred_list, class_list)
        class_list, class_pred_list = [], []

        model.eval()
        with torch.no_grad():
            for i, (inputs, outputs) in enumerate(validation_loader):
                t2, dwi, adc = inputs[0], inputs[1], inputs[2]
                ece = torch.squeeze(outputs[0], dim=1)
                roi, prostate = outputs[1].to(device), outputs[2].to(device)

                inputs = torch.cat([t2, dwi, adc], dim=1)
                inputs = inputs.type(torch.FloatTensor).to(device)

                ece = torch.argmax(ece, dim=1)
                ece = ece.type(torch.LongTensor).to(device)

                roi_out, prostate_out, class_out, _ = model(inputs)
                class_out_softmax = nn.functional.softmax(class_out, dim=1)

                loss1 = seg_criterion1(roi_out, roi)
                loss2 = seg_criterion2(prostate_out, prostate)
                loss3 = cla_criterion(class_out, ece)
                loss = loss1 + loss2 + loss3

                valid_loss1 += loss1.item()
                valid_loss1_list.append(loss1.item())

                valid_loss2 += loss2.item()
                valid_loss2_list.append(loss2.item())

                valid_loss3 += loss3.item()
                valid_loss3_list.append(loss3.item())

                valid_loss += loss.item()
                valid_loss_list.append(loss.item())

                # compute auc
                class_list.extend(list(ece.cpu().numpy()))
                class_pred_list.extend(
                    list(class_out_softmax.cpu().detach().numpy()[..., 1]))

                if (i + 1) % 10 == 0:
                    print(
                        'Epoch [%d / %d], Iter [%d], Cancer validation Loss: %.4f, Prostate validation Loss: %.4f, ECE validation Loss: %.4f, Loss: %.4f'
                        %
                        (epoch + 1, 1000, i + 1, valid_loss1 / 10,
                         valid_loss2 / 10, valid_loss3 / 10, valid_loss / 10))
                    valid_loss1 = 0.0
                    valid_loss2 = 0.0
                    valid_loss3 = 0.0
                    valid_loss = 0.0
            _, _, valid_auc = get_auc(class_pred_list, class_list)

        for index, (name, param) in enumerate(model.named_parameters()):
            if 'bn' not in name:
                # writer.add_histogram(name+'_grad', param.grad.cpu().data.numpy(), epoch+1)
                writer.add_histogram(name + '_data',
                                     param.cpu().data.numpy(), epoch + 1)

        writer.add_scalars(
            'Train_Val_Loss1', {
                'train_cancer_dice_loss': np.mean(train_loss1_list),
                'val_cancer_dice_loss': np.mean(valid_loss1_list)
            }, epoch + 1)
        writer.add_scalars(
            'Train_Val_Loss2', {
                'train_prostate_dice_loss': np.mean(train_loss2_list),
                'val_prostate_dice_loss': np.mean(valid_loss2_list)
            }, epoch + 1)
        writer.add_scalars(
            'Train_Val_Loss3', {
                'train_bce_loss': np.mean(train_loss3_list),
                'val_bce_loss': np.mean(valid_loss3_list)
            }, epoch + 1)
        writer.add_scalars(
            'Train_Val_Loss', {
                'train_loss': np.mean(train_loss_list),
                'val_loss': np.mean(valid_loss_list)
            }, epoch + 1)
        writer.add_scalars('Train_Val_auc', {
            'train_auc': train_auc,
            'val_auc': valid_auc
        }, epoch + 1)
        writer.close()

        print('Epoch:', epoch + 1,
              'Training Loss:', np.mean(train_loss_list), 'Valid Loss:',
              np.mean(valid_loss_list), 'Train auc:', train_auc, 'Valid auc:',
              valid_auc)

        scheduler.step(np.mean(valid_loss_list))
        early_stopping(sum(valid_loss_list) / len(valid_loss_list),
                       model,
                       save_path=model_folder,
                       evaluation=min)

        if early_stopping.early_stop:
            print("Early stopping")
            break
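Note: the DiceLoss used by Train() is also defined elsewhere. A minimal sketch of a soft-Dice segmentation loss (1 minus the mean per-sample Dice coefficient); the actual DiceLoss in the repository may differ:

import torch
import torch.nn as nn

class DiceLoss(nn.Module):
    def __init__(self, smooth=1e-5):
        super().__init__()
        self.smooth = smooth

    def forward(self, pred, target):
        # Flatten each sample and compute a soft Dice coefficient per sample.
        pred = pred.contiguous().view(pred.shape[0], -1)
        target = target.contiguous().view(target.shape[0], -1)
        intersection = (pred * target).sum(dim=1)
        dice = (2.0 * intersection + self.smooth) / (pred.sum(dim=1) + target.sum(dim=1) + self.smooth)
        return 1.0 - dice.mean()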
Example #9
def ModelTest(weights_list=None, data_type=None):
    if data_type is None:
        data_type = ['alltrain', 'test']
    from Metric.classification_statistics import get_auc, draw_roc
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    input_shape = (192, 192)
    batch_size = 2
    model_folder = model_root + '/ResNeXt_CBAM_CV_20201116_dis_five_lf'
    bc = BinaryClassification()

    fpr_list, tpr_list, auc_list = [], [], []
    for type in data_type:
        spliter = DataSpliter()
        sub_list = spliter.LoadName(data_root + '/{}-name.csv'.format(type))

        if type == 'test':
            data = DataManager(sub_list=sub_list)
            data.AddOne(Image2D(data_root + '/T2Slice/Test',
                                shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/AdcSlice/Test', shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/DwiSlice/Test', shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/DistanceMap/Test',
                        shape=input_shape,
                        is_roi=True))
            data.AddOne(Feature(data_root + '/FiveClinicalbGS.csv'),
                        is_input=True)
            data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'),
                        is_input=False)
            data_loader = DataLoader(data,
                                     batch_size=batch_size,
                                     shuffle=False)
        else:
            data = DataManager(sub_list=sub_list)
            data.AddOne(Image2D(data_root + '/T2Slice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/AdcSlice', shape=input_shape))
            data.AddOne(Image2D(data_root + '/DwiSlice/', shape=input_shape))
            data.AddOne(
                Image2D(data_root + '/DistanceMap',
                        shape=input_shape,
                        is_roi=True))
            data.AddOne(Feature(data_root + '/FiveClinicalbGS.csv'),
                        is_input=True)
            data.AddOne(Label(data_root + '/label.csv', label_tag='Positive'),
                        is_input=False)
            data_loader = DataLoader(data,
                                     batch_size=batch_size,
                                     shuffle=False)

        cv_folder_list = [
            one
            for one in IterateCase(model_folder, only_folder=True, verbose=0)
        ]
        cv_pred_list, cv_label_list = [], []

        for cv_index, cv_folder in enumerate(cv_folder_list):
            model = ResNeXt(3, 2).to(device)
            if weights_list is None:
                one_fold_weights_list = [
                    one for one in IterateCase(
                        cv_folder, only_folder=False, verbose=0)
                    if one.is_file()
                ]
                one_fold_weights_list = sorted(
                    one_fold_weights_list,
                    key=lambda x: os.path.getctime(str(x)))
                weights_path = one_fold_weights_list[-1]
            else:
                weights_path = weights_list[cv_index]

            print(weights_path.name)
            model.load_state_dict(torch.load(str(weights_path)))

            pred_list, label_list = [], []
            model.eval()
            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)

                preds = model(*inputs)[:, 1]
                pred_list.extend((preds).cpu().data.numpy().squeeze().tolist())
                label_list.extend((
                    outputs).cpu().data.numpy().astype(int).squeeze().tolist())

            cv_pred_list.append(pred_list)
            cv_label_list.append(label_list)

            del model, weights_path

        cv_pred = np.array(cv_pred_list)
        cv_label = np.array(cv_label_list)
        mean_pred = np.mean(cv_pred, axis=0)
        mean_label = np.mean(cv_label, axis=0)
        np.save(
            os.path.join(
                r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/AddClinical/FiveFeatureLFbGS',
                type + 'JSPH_FiveFeature_label.npy'), mean_label)
        np.save(
            os.path.join(
                r'/home/zhangyihong/Documents/ProstateECE/Result/NPY/AddClinical/FiveFeatureLFbGS',
                type + 'JSPH_FiveFeature_pred.npy'), mean_pred)

        fpr, tpr, auc = get_auc(mean_pred, mean_label)
        fpr_list.append(fpr)
        tpr_list.append(tpr)
        auc_list.append(auc)

        print(type)
        bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())

    draw_roc(fpr_list, tpr_list, auc_list, name_list=['alltrain', 'test'])
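Note: BinaryClassification().Run(...) reports metrics at a probability threshold, and Example #3 reuses the training set's 'Youden Index' as the test-set threshold. The class itself is not shown; a minimal sketch (youden_threshold is a hypothetical helper, not the library's API) of how such a threshold can be derived from the ROC curve:

import numpy as np
from sklearn import metrics

def youden_threshold(pred_list, label_list):
    # Threshold maximizing Youden's J = sensitivity + specificity - 1.
    fpr, tpr, thresholds = metrics.roc_curve(np.asarray(label_list), np.asarray(pred_list))
    return thresholds[np.argmax(tpr - fpr)]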
Example #10
def ModelEnhancedSUH(weights_list=None, is_dismap=True):
    model_root = r'/home/zhangyihong/Documents/ProstateECE/Model'
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

    if is_dismap:
        from SYECE.model import ResNeXt
        model_folder = model_root + '/ResNeXt_CBAM_CV_20200814'
    else:
        from SYECE.ModelWithoutDis import ResNeXt
        model_folder = model_root + '/ResNeXt_CBAM_CV_20200820'

    bc = BinaryClassification()

    cv_folder_list = [
        one for one in IterateCase(model_folder, only_folder=True, verbose=0)
    ]
    cv_pred_list, cv_label_list = [], []

    for cv_index, cv_folder in enumerate(cv_folder_list):
        model = ResNeXt(3, 2).to(device)
        if weights_list is None:
            one_fold_weights_list = [
                one
                for one in IterateCase(cv_folder, only_folder=False, verbose=0)
                if one.is_file()
            ]
            one_fold_weights_list = sorted(
                one_fold_weights_list, key=lambda x: os.path.getctime(str(x)))
            weights_path = one_fold_weights_list[-1]
        else:
            weights_path = weights_list[cv_index]

        print(weights_path.name)
        model.load_state_dict(torch.load(str(weights_path)))

        pred_list_enhanced, label_list_enhanced = [], []
        model.eval()
        for i in range(9):
            print(i)
            pred_list, label_list = [], []
            if i == 0:
                data_loader = EnhancedTestSUH(is_dismap, param_config)
            else:
                data_loader = EnhancedTestSUH(is_dismap)

            for inputs, outputs in data_loader:
                inputs = MoveTensorsToDevice(inputs, device)
                outputs = MoveTensorsToDevice(outputs, device)
                preds = model(*inputs)[:, 1]
                if isinstance(
                    (1 - preds).cpu().data.numpy().squeeze().tolist(), float):
                    if is_dismap:
                        pred_list.append(
                            (1 - preds).cpu().data.numpy().squeeze().tolist())
                        label_list.append(
                            (1 - outputs).cpu().data.numpy().astype(
                                int).squeeze().tolist())
                    else:
                        pred_list.append(
                            (preds).cpu().data.numpy().squeeze().tolist())
                        label_list.append((outputs).cpu().data.numpy().astype(
                            int).squeeze().tolist())
                else:
                    if is_dismap:
                        pred_list.extend(
                            (1 - preds).cpu().data.numpy().squeeze().tolist())
                        label_list.extend(
                            (1 - outputs).cpu().data.numpy().astype(
                                int).squeeze().tolist())
                    else:
                        pred_list.extend(
                            (preds).cpu().data.numpy().squeeze().tolist())
                        label_list.extend((outputs).cpu().data.numpy().astype(
                            int).squeeze().tolist())
            pred_list_enhanced.append(pred_list)
            label_list_enhanced.append(label_list)

        cv_pred_list.append(
            np.mean(np.array(pred_list_enhanced), axis=0).tolist())
        cv_label_list.append(
            np.mean(np.array(label_list_enhanced), axis=0).tolist())

        fpr, tpr, auc = get_auc(
            np.mean(pred_list_enhanced, axis=0).tolist(),
            np.mean(label_list_enhanced, axis=0).tolist())
        print('AUC: {}'.format(auc))
        del model, weights_path

    cv_pred = np.array(cv_pred_list)
    cv_label = np.array(cv_label_list)
    mean_pred = np.mean(cv_pred, axis=0)
    mean_label = np.mean(cv_label, axis=0)
    bc.Run(mean_pred.tolist(), mean_label.astype(int).tolist())
    return mean_pred, mean_label
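Note: ModelEnhancedSUH returns the ensembled predictions and labels without plotting them. A short usage sketch, assuming the get_auc/draw_roc helpers sketched after Example #1 and a hypothetical curve name:

mean_pred, mean_label = ModelEnhancedSUH(is_dismap=True)
fpr, tpr, auc = get_auc(mean_pred.tolist(), mean_label.astype(int).tolist())
draw_roc([fpr], [tpr], [auc], name_list=['SUH enhanced'])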