-
Notifications
You must be signed in to change notification settings - Fork 0
/
metrics.py
43 lines (35 loc) · 1.31 KB
/
metrics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
from sklearn import metrics
import matplotlib.pyplot as plt
import numpy
#deprecated, don't use!
def basicTextMetrics(name, testX, testY, yPred, yProb):
    """Print AUROC, precision, recall, and accuracy for binary predictions.

    Deprecated; kept only for backward compatibility. testX is unused but
    retained for signature consistency with the other metric helpers.
    """
    scores = [
        (' AUROC:', metrics.roc_auc_score(testY, yProb)),
        (' precision:', metrics.precision_score(testY, yPred, average='binary')),
        (' recall:', metrics.recall_score(testY, yPred, average='binary')),
        (' accuracy:', metrics.accuracy_score(testY, yPred)),
    ]
    # Same output, same order as the original hand-unrolled prints.
    for label, value in scores:
        print('\n' + name + label, value)
    return
#deprecated, don't use!
def aurocGraph(name, testX, testY, yPred, yProb):
    """Plot the ROC curve for binary predictions and return the AUROC.

    testX and yPred are unused but retained for signature consistency
    with the other metric helpers. Shows the plot window (blocking in
    interactive backends) before returning.
    """
    auroc_val = metrics.roc_auc_score(testY, yProb)
    print('auroc_val:', auroc_val)
    # pos_label must be passed by keyword: it is keyword-only in
    # scikit-learn >= 1.0, so the old positional call raised TypeError.
    fpr, tpr, thresholds = metrics.roc_curve(testY, yProb, pos_label=1)
    plt.title(name + ' ROC curve')
    plt.plot(fpr, tpr)
    plt.show()
    return auroc_val
def confusionMatrix(name, testX, testY, yPred, yProb):
    """Print and return the confusion matrix for binary predictions.

    Rows/columns are ordered [1, 0] (positive class first). name, testX,
    and yProb are unused but retained for signature consistency with the
    other metric helpers.
    """
    # labels must be passed by keyword: it is keyword-only in
    # scikit-learn >= 1.0, so the old positional call raised TypeError.
    matrix = numpy.asarray(metrics.confusion_matrix(testY, yPred,
                                                    labels=[1, 0]))
    print('\nConfusion Matrix:\n',
          matrix, '\n')
    return matrix
#------------------------------------
#below this line are only metrics, no print
def auroc(testX, testY, yPred, yProb):
    """Return the area under the ROC curve (no printing, no plotting).

    testX and yPred are unused but retained for signature consistency.
    """
    score = metrics.roc_auc_score(testY, yProb)
    return score
def fast_accuracy(pred, label):
    """Return the fraction of positions where pred and label agree.

    Fixes PEP 8 E731 (named lambda -> def), drops the throwaway list
    inside sum(), and returns 0.0 for an empty label sequence instead
    of raising ZeroDivisionError.
    """
    # len() rather than truthiness so numpy arrays are handled too.
    if len(label) == 0:
        return 0.0
    return sum(p == l for p, l in zip(pred, label)) / len(label)
def accuracy(testX, testY, yPred, yProb):
    """Return plain accuracy of yPred against testY.

    testX and yProb are unused but retained for signature consistency.
    Raises ZeroDivisionError on empty testY, matching the original helper.
    """
    # Helper inlined: count matches, divide by label count.
    correct = sum(1 if p == t else 0 for p, t in zip(yPred, testY))
    return correct / len(testY)