Example #1
def compute_auc(test_gen, class_names, pred):
    # Assumes: to_categorical from Keras, roc_curve / roc_auc_score from sklearn.metrics,
    # and auc_roc as the area under the (fpr, tpr) points, i.e. sklearn.metrics.auc.
    # One-hot encode the generator's ground-truth class indices.
    Y_testc = to_categorical(test_gen.classes, num_classes=len(class_names))
    fpr = dict()
    tpr = dict()
    roc_auc = dict()
    # Per-class (one-vs-rest) ROC curve and AUC.
    for i in range(len(class_names)):
        fpr[i], tpr[i], _ = roc_curve(Y_testc[:, i], pred[:, i])
        roc_auc[i] = auc_roc(fpr[i], tpr[i])

    # Micro-average: compute metrics globally by considering each element of the
    # label indicator matrix as a label.
    #fpr, tpr, _ = roc_curve(Y_test, predicted)
    fpr["micro"], tpr["micro"], _ = roc_curve(Y_testc.ravel(), pred.ravel())
    roc_auc["micro"] = auc_roc(fpr["micro"], tpr["micro"])
    # Weighted one-vs-rest ROC AUC over all classes (not just the last loop index).
    auc_score = roc_auc_score(Y_testc, pred, average='weighted')

    return roc_auc, auc_score, fpr, tpr
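A minimal usage sketch for compute_auc, assuming a Keras-style generator with a .classes attribute and a trained model; test_gen, model and class_names here are placeholders, not part of the original example:

# Hypothetical call site for compute_auc (names are illustrative only).
class_names = ['class_a', 'class_b', 'class_c']
pred = model.predict(test_gen)                      # shape (n_samples, n_classes)
roc_auc, auc_score, fpr, tpr = compute_auc(test_gen, class_names, pred)
print('micro-average AUC:', roc_auc['micro'])
print('weighted AUC:', auc_score)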
Example #2
        all_losses.append(l)

    # Testing: build a DataLoader over the test bags and score each bag.
    test = MyDataset(bags_ts)
    loader_ts = DataLoader(test, batch_size=1)
    predictions = []

    # Freeze the MLP so no gradients are tracked at test time.
    for param in mlp.parameters():
        param.requires_grad = False
    for idx_ts, tsbag in enumerate(loader_ts):
        tsbag = tsbag.float()
        tsbag = cuda(Variable(tsbag))
        # Score the instances in the bag; the bag-level score is their maximum.
        scores = mlp.forward(tsbag[0])

        predictions.append(float(torch.max(scores)))
    auc = auc_roc(y_ts, predictions)
    aucs.append(auc)
    print('AUC=', auc)

    # Best achievable accuracy from the ROC curve: at each threshold,
    # TP = tpr * #positives and TN = (1 - fpr) * #negatives.
    f, t, thresholds = metrics.roc_curve(y_ts, predictions)
    AN = sum(x < 0 for x in y_ts)   # number of negative bags (labels < 0)
    AP = sum(x > 0 for x in y_ts)   # number of positive bags (labels > 0)
    TN = (1.0 - f) * AN
    TP = t * AP
    Acc2 = (TP + TN) / len(y_ts)
    acc = max(Acc2)
    print('accuracy=', acc)
    accs.append(acc)
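The accuracy printed above is the best accuracy over all ROC operating points. A self-contained sketch of the same trick with made-up labels and scores (all names below are illustrative, not from the original snippet):

import numpy as np
from sklearn.metrics import roc_curve

y_true = np.array([-1, -1, 1, 1, 1])               # labels in {-1, +1}
y_score = np.array([0.10, 0.40, 0.35, 0.80, 0.70])

fpr, tpr, _ = roc_curve(y_true, y_score)
n_neg = np.sum(y_true < 0)
n_pos = np.sum(y_true > 0)
# Accuracy at each ROC threshold: (TP + TN) / N, then take the maximum.
acc_per_threshold = (tpr * n_pos + (1.0 - fpr) * n_neg) / len(y_true)
print('best threshold accuracy:', acc_per_threshold.max())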
    
Example #3
#%%
# Evaluate the saved per-fold checkpoints with 5-fold stratified cross-validation.
fold_auc = []
skf = StratifiedKFold(n_splits=5)
skf.get_n_splits(data, lbl)
fold = 0
for tr_idx, ts_idx in skf.split(data, lbl):
    fold += 1
    ixtr, ixts, ytr, yts = data[tr_idx], data[ts_idx], lbl[tr_idx], lbl[ts_idx]
    test_auc = []

    model = Net().cuda()
    # Load the checkpoint saved for this fold (e.g. 'model_name1.pth', ..., 'model_name5.pth').
    model.load_state_dict(torch.load('model_name' + str(fold) + '.pth'))

    predictions = []
    for ijk in range(len(ixts)):
        sc = []
        tsbag = ixts[ijk]

        # Score every 512x512 RGB patch in the bag with the loaded model.
        for p in range(len(tsbag)):
            patch = torch.FloatTensor(tsbag[p])

            patch = patch.cuda()
            patch = Variable(patch)
            patch = patch.reshape([1, 3, 512, 512])
            sc.append(model.forward(patch).item())
        # Bag-level prediction = maximum patch score.
        predictions.append(float(np.max(sc)))
    test_auc.append(auc_roc(yts, predictions))
    fold_auc.append(max(test_auc))
print(fold_auc)
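The per-fold checkpoints loaded above are assumed to have been written during training with the same naming scheme; a minimal sketch of that save-side counterpart (the filename pattern is carried over from the load call, everything else is assumed):

# Hypothetical save side for the per-fold checkpoints loaded in Example #3.
torch.save(model.state_dict(), 'model_name' + str(fold) + '.pth')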
Example #4
#%%
from sklearn import svm
fold_auc = []
skf = StratifiedKFold(n_splits=5)
skf.get_n_splits(data, lbl)
pr_auc = []
fold = 0
for tr_idx, ts_idx in skf.split(data, lbl):
    fold += 1
    # Split the data (data), labels (lbl) and gene features (gene) for this fold.
    ixtr, ixts, ytr, yts, pxtr, pxts = (data[tr_idx], data[ts_idx],
                                        lbl[tr_idx], lbl[ts_idx],
                                        gene[tr_idx], gene[ts_idx])

    # RBF-kernel SVM on the gene features; record its per-fold ROC AUC on the decision function.
    pmodel = svm.SVC(kernel='rbf', gamma='auto', C=100)
    pmodel.fit(pxtr, ytr)
    pr_auc.append(auc_roc(yts, pmodel.decision_function(pxts)))

    criterion = hinge  # hinge loss (defined elsewhere in the original script)
    test_auc = []
    all_losses = []
    t = 0.30

    epochs = 150
    lupi = Net()
    lupi.cuda()
    optimizer = optim.Adam(lupi.parameters(), lr=0.001)
    max_auc = 0
    max_score = torch.FloatTensor([0]).cuda()

    for e in range(epochs):
        loss = 0.0
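The hinge criterion assigned above is not defined in this excerpt; a minimal sketch of a standard hinge loss for ±1 labels in PyTorch (the function name and signature are assumptions, not the original implementation):

import torch

def hinge(scores, labels):
    # Standard hinge loss for labels in {-1, +1}: mean(max(0, 1 - y * s)).
    return torch.clamp(1.0 - labels * scores, min=0.0).mean()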
Example #5
import numpy as np
import pandas as pd
from sklearn.metrics import roc_auc_score as auc_roc
from sklearn.model_selection import StratifiedKFold


# Load binary labels and gene-expression features.
file = pd.read_csv('./data_files/labels.csv')
lbl = np.array(file.iloc[:, 0], dtype=np.float64)
file = pd.read_csv('./data_files/genes.csv')
gene = np.array(file)

#%%

from sklearn import svm

# 5-fold stratified cross-validation of an RBF SVM on the gene features,
# scored with ROC AUC of the SVM decision function.
fold_auc = []
skf = StratifiedKFold(n_splits=5)
skf.get_n_splits(gene, lbl)
pr_auc = []
fold = 0
for tr_idx, ts_idx in skf.split(gene, lbl):
    fold += 1
    ytr, yts, pxtr, pxts = lbl[tr_idx], lbl[ts_idx], gene[tr_idx], gene[ts_idx]

    pmodel = svm.SVC(kernel='rbf', gamma='auto', C=100)
    pmodel.fit(pxtr, ytr)
    fold_auc.append(auc_roc(yts, pmodel.decision_function(pxts)))

print(np.mean(fold_auc))
Example #6
        optimizer.zero_grad()
        loss.backward(retain_graph=True)

        optimizer.step()
        all_losses.append(float(loss))

    # Validation pass: score each validation bag and track the best model by AUC.
    y_val = []
    val_pred = []
    for batch_idx, (data, label) in enumerate(val_loader):
        y_val.append(float(float(label[0]) > 0))   # binarize the bag label
        valbag = data.float()
        valbag = Variable(valbag).type(torch.cuda.FloatTensor)
        scores = cnn.forward(valbag)

        # Bag-level prediction = maximum instance score.
        val_pred.append(float(torch.max(scores)))
    val_auc = auc_roc(y_val, val_pred)
    # Keep a deep copy of the CNN whenever validation AUC improves.
    if e == 0 or val_auc > best_auc:
        best_auc = val_auc
        best_cnn = deepcopy(cnn)

    avg_loss = l / count

    #    print ("current loss=",loss)
    print("best validation auc yet=", best_auc)

# Freeze the best model's parameters before running inference.
for param in best_cnn.parameters():
    param.requires_grad = False
predictions = []
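Setting requires_grad = False above disables gradient tracking for the frozen model; an equivalent and arguably more idiomatic pattern in recent PyTorch is to switch to eval mode and wrap inference in torch.no_grad(). A sketch, where test_loader is a hypothetical DataLoader over the test bags:

# Sketch only: test_loader is a placeholder, not part of the original snippet.
best_cnn.eval()
test_pred = []
with torch.no_grad():
    for data, label in test_loader:
        bag = data.float().cuda()
        scores = best_cnn(bag)
        test_pred.append(float(torch.max(scores)))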