-
Notifications
You must be signed in to change notification settings - Fork 0
/
general_utils.py
63 lines (50 loc) · 2.91 KB
/
general_utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 1 11:39:53 2019
@author: gracepetrosini
"""
import pandas as pd
from sklearn import metrics
# =============================================================================
# =============================================================================
def compute_metrics(y_true_cts, y_pred_cts, y_true_bin, y_pred_bin, y_pred_score=None):
    """Compute regression and/or classification metrics and return them as DataFrames.

    Parameters
    ----------
    y_true_cts, y_pred_cts : array-like or None
        Continuous ground truth / predictions. If ``y_pred_cts`` is None the
        regression metrics are skipped and an empty DataFrame is returned for them.
    y_true_bin, y_pred_bin : array-like or None
        Binary ground truth / predictions. If ``y_pred_bin`` is None the
        classification metrics are skipped and an empty DataFrame is returned for them.
    y_pred_score : array-like, optional
        Scores/probabilities used for ROC AUC; falls back to ``y_pred_bin``
        (hard labels) when not supplied.

    Returns
    -------
    (regression_metrics, classification_metrics) : tuple of pandas.DataFrame
        One row per metric name (``orient='index'``).
    """
    # --- Regression metrics ---------------------------------------------
    regression_dict = {}
    if y_pred_cts is not None:
        y_true = y_true_cts
        y_pred = y_pred_cts
        regression_dict['explained_variance_score'] = metrics.explained_variance_score(y_true, y_pred)
        regression_dict['mean_absolute_error'] = metrics.mean_absolute_error(y_true, y_pred)
        regression_dict['mean_squared_error'] = metrics.mean_squared_error(y_true, y_pred)
        regression_dict['median_absolute_error'] = metrics.median_absolute_error(y_true, y_pred)
        regression_dict['r2'] = metrics.r2_score(y_true, y_pred)
    # Built unconditionally so the print/return below never hits a NameError
    # when y_pred_cts is None (an empty dict yields an empty DataFrame).
    regression_metrics = pd.DataFrame.from_dict(regression_dict, orient='index')
    # --- Classification metrics -----------------------------------------
    classification_dict = {}
    if y_pred_bin is not None:
        y_true = y_true_bin
        y_pred = y_pred_bin
        classification_dict['accuracy_score'] = metrics.accuracy_score(y_true, y_pred)
        classification_dict['confusion_matrix'] = metrics.confusion_matrix(y_true, y_pred)
        classification_dict['f1_score'] = metrics.f1_score(y_true, y_pred)
        classification_dict['precision_score'] = metrics.precision_score(y_true, y_pred)
        classification_dict['recall_score'] = metrics.recall_score(y_true, y_pred)
        if y_pred_score is None:
            # Fall back to hard labels; AUC from labels is coarser than from scores.
            y_pred_score = y_pred
        classification_dict['roc_auc_score'] = metrics.roc_auc_score(y_true, y_pred_score)
        classification_dict['gini'] = 2 * classification_dict['roc_auc_score'] - 1
        # NOTE(review): 'sensibility' likely means 'sensitivity' (= recall, TP / (TP+FN));
        # key kept as-is because it is part of the returned DataFrame's index.
        classification_dict['sensibility'] = classification_dict['confusion_matrix'][1, 1] / sum(classification_dict['confusion_matrix'][1, :])
        # Specificity = TN / (TN + FP), i.e. row 0 of the confusion matrix.
        classification_dict['specificity'] = classification_dict['confusion_matrix'][0, 0] / sum(classification_dict['confusion_matrix'][0, :])
    # Same NameError guard as above: always build the DataFrame.
    classification_metrics = pd.DataFrame.from_dict(classification_dict, orient='index')
    # Preserved side effect: the original printed both tables before returning.
    print(classification_metrics)
    print(regression_metrics)
    return regression_metrics, classification_metrics