Example #1
    def test_confusion_matrix(self):

        try:
            from sklearn.metrics import confusion_matrix as skcm
        except ImportError:
            self.skipTest("sklearn is not installed")

        # Compare the dlpy confusion_matrix against sklearn on the same data.
        skcm_matrix1 = skcm(self.local_class1.target, self.local_class1.p_target)
        skcm_matrix2 = skcm(self.local_class1.target, self.local_class1.p_target, labels=[1, 3, 4])
        
        dlpycm_matrix1 = confusion_matrix(self.class_table1.target, self.class_table1.p_target)
        dlpycm_matrix2 = confusion_matrix(self.class_table1.target, self.class_table1.p_target, labels=[1, 3, 4])
        
        self.assertTrue(np.array_equal(skcm_matrix1, dlpycm_matrix1.values))
        self.assertTrue(np.array_equal(skcm_matrix2, dlpycm_matrix2.values))
        
        # Targets and predictions come from two different tables; id_vars aligns the rows.
        skcm_matrix3 = skcm(self.local_class1.target, self.local_class2.p_target)
        skcm_matrix4 = skcm(self.local_class1.target, self.local_class2.p_target, labels=[1, 3, 4])
        
        dlpycm_matrix3 = confusion_matrix(self.class_table1.target, self.class_table2.p_target,
                                          id_vars=['id1', 'id2'])
        dlpycm_matrix4 = confusion_matrix(self.class_table1.target, self.class_table2.p_target, 
                                          labels=[1, 3, 4], id_vars=['id1', 'id2'])
        
        self.assertTrue(np.array_equal(skcm_matrix3, dlpycm_matrix3.values))
        self.assertTrue(np.array_equal(skcm_matrix4, dlpycm_matrix4.values))
        
        # Alternative calling convention: column names plus the castable keyword.
        dlpycm_matrix5 = confusion_matrix('target', 'p_target', castable=self.class_table1)
        dlpycm_matrix6 = confusion_matrix('target', 'p_target', castable=self.class_table1, labels=[1, 3, 4])
        
        self.assertTrue(np.array_equal(skcm_matrix1, dlpycm_matrix5.values))
        self.assertTrue(np.array_equal(skcm_matrix2, dlpycm_matrix6.values))
Example #2
    def test_confusion_matrix(self):

        try:
            from sklearn.metrics import confusion_matrix as skcm
        except ImportError:
            self.skipTest("sklearn is not installed")

        # Pull the table into a local DataFrame so sklearn scores the same data.
        local_class1 = self.class_table1.to_frame()
        skcm_matrix1 = skcm(local_class1.target, local_class1.p_target)
        skcm_matrix2 = skcm(local_class1.target, local_class1.p_target, labels=[1, 3, 4])
        
        dlpycm_matrix1 = confusion_matrix(self.class_table1, 'target', 'p_target')
        dlpycm_matrix2 = confusion_matrix(self.class_table1, 'target', 'p_target', labels=[1, 3, 4])
        
        self.assertTrue(np.array_equal(skcm_matrix1, dlpycm_matrix1.values))
        self.assertTrue(np.array_equal(skcm_matrix2, dlpycm_matrix2.values))
Example #3
def confusion_matrix(*args):
    # The first positional argument is a dict holding the raw model outputs and labels.
    info_for_eval = args[0]
    predictions = detach_tensor(info_for_eval['predictions'])
    ground_truth = detach_tensor(info_for_eval['ground_truth'])
    # Convert per-class scores into hard class predictions.
    predicted = np.argmax(predictions, axis=1)
    cm = skcm(ground_truth, predicted)
    cm_plot = plot_confusion_matrix(cm)

    return 'image', 'confusion_matrix', cm_plot
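
This callback relies on two helpers that are not shown, detach_tensor and plot_confusion_matrix. A minimal sketch of what they might look like, assuming PyTorch tensors and a matplotlib heatmap (the names and details below are assumptions, not part of the original example):

import matplotlib.pyplot as plt

def detach_tensor(tensor):
    # Assumed helper: detach a PyTorch tensor from the graph, move it to CPU,
    # and return it as a NumPy array.
    return tensor.detach().cpu().numpy()

def plot_confusion_matrix(cm):
    # Assumed helper: render the confusion matrix as a matplotlib figure.
    fig, ax = plt.subplots()
    ax.imshow(cm, cmap='Blues')
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, int(cm[i, j]), ha='center', va='center')
    ax.set_xlabel('Predicted label')
    ax.set_ylabel('True label')
    fig.tight_layout()
    return fig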
Example #4
# Imports implied by the aliases used below (assumed; the original snippet omits them)
from sklearn.model_selection import cross_val_score
from sklearn.metrics import classification_report as skcr, confusion_matrix as skcm
from sklearn.metrics import roc_curve as skrc, auc
import matplotlib.pyplot as plt

# Train/test fitting and validation
model.fit(x_train, y_train)
print(model.score(x_test, y_test))
proba = model.predict_proba(x_test)
pred = model.predict(x_test)
s = cross_val_score(model, x_test, y_test, cv=12)
print(s.mean())
print(s.std())

# The F1 scores show that the model does a fairly decent job of predicting those
# who died and an okay job of predicting those who survived.
print(skcr(y_test, pred))

# Layout of the confusion matrix:
# (true negative)  (false positive)
# (false negative) (true positive)
print(skcm(y_test, pred))

# Keep only the predicted probability of the positive class (second column of predict_proba).
probs = [item[1] for item in proba]

# Build the ROC curve
false_positive_rate, true_positive_rate, thresholds = skrc(y_test, probs)
roc_auc = auc(false_positive_rate, true_positive_rate)
# Plot the ROC curve with the AUC in the legend
plt.title('ROC')
plt.plot(false_positive_rate, true_positive_rate, 'r', label='AUC = %0.2f' % roc_auc)
plt.legend(loc='lower right')
plt.show()