-
Notifications
You must be signed in to change notification settings - Fork 3
/
performance_metrics.py
50 lines (31 loc) · 1.11 KB
/
performance_metrics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
# Wrote my own implementations of performance metrics just for fun
# Feel free to use the scikit-learn functions
import numpy as np
# Works for any number of classes; labels must be integers in [0, num_class)
def confusion_matrix(target, predicted, num_class):
    """Build a num_class x num_class confusion matrix.

    Rows index the true labels and columns the predicted labels, so
    entry (i, j) counts the samples whose true label is i and whose
    predicted label is j.  Labels are assumed to be integers in
    [0, num_class).

    Returns a float ndarray of counts.
    """
    matrix = np.zeros((num_class, num_class))
    for true_label, pred_label in zip(target, predicted):
        matrix[true_label, pred_label] += 1
    return matrix
def accuracy(target, predicted):
    """Return the fraction of predictions equal to the target labels.

    Equality is detected via a zero difference, so labels must be
    numeric.  Result is a Python float in [0, 1].
    """
    n_total = target.shape[0]
    # A prediction is correct exactly when target - predicted == 0.
    n_correct = n_total - np.count_nonzero(target - predicted)
    return float(n_correct) / n_total
def precision(target, predicted, num_class):
    """Macro-averaged precision over all classes.

    For each class c, precision is cm[c, c] / sum(cm[:, c]) — the
    fraction of samples predicted as c that truly are c.  The per-class
    values are averaged with equal weight (macro average).

    NOTE: if some class is never predicted, its column sums to zero and
    the division yields NaN, which propagates into the mean (matches
    the original behavior; callers should ensure every class appears
    in `predicted` or handle NaN).
    """
    # Fixed: removed unused local `num_trainset` (dead code).
    cm = confusion_matrix(target, predicted, num_class)
    p = np.zeros((num_class, 1))
    for i in range(num_class):
        p[i] = cm[i, i] / np.sum(cm[:, i])
    return np.mean(p)
def recall(target, predicted, num_class):
    """Macro-averaged recall over all classes.

    For each class c, recall is cm[c, c] / sum(cm[c, :]) — the fraction
    of true class-c samples that were predicted as c.  The per-class
    values are averaged with equal weight (macro average).

    NOTE: if some class never occurs in `target`, its row sums to zero
    and the division yields NaN, which propagates into the mean
    (matches the original behavior).
    """
    # Fixed: removed unused local `num_trainset` (dead code).
    cm = confusion_matrix(target, predicted, num_class)
    r = np.zeros((num_class, 1))
    for i in range(num_class):
        r[i] = cm[i, i] / np.sum(cm[i, :])
    return np.mean(r)