evaluation.py
import numpy as np


def _fast_hist(label_pred, label_true, num_classes):
    # Per-image confusion histogram: rows index ground truth, columns index
    # predictions. Pixels whose label falls outside [0, num_classes)
    # (e.g. an ignore label) are masked out.
    mask = (label_true >= 0) & (label_true < num_classes)
    hist = np.bincount(
        num_classes * label_true[mask].astype(int) + label_pred[mask],
        minlength=num_classes ** 2).reshape(num_classes, num_classes)
    return hist


def confusion_matrix(predictions, gts, num_classes):
    # Accumulate the per-image histograms into one confusion matrix.
    hist = np.zeros((num_classes, num_classes))
    for lp, lt in zip(predictions, gts):
        hist += _fast_hist(lp.flatten(), lt.flatten(), num_classes)
    return hist


def kappa_score(confusion):
    # Cohen's kappa, computed from disagreement: 1 minus the ratio of observed
    # to chance-expected off-diagonal (misclassified) mass.
    n_classes = confusion.shape[0]
    sum0 = np.sum(confusion, axis=0)
    sum1 = np.sum(confusion, axis=1)
    expected = np.outer(sum0, sum1) / np.sum(sum0)
    # Weight matrix with zeros on the diagonal (np.int was removed in NumPy 1.24).
    w_mat = np.ones((n_classes, n_classes), dtype=int)
    w_mat.flat[:: n_classes + 1] = 0
    k = np.sum(w_mat * confusion) / np.sum(w_mat * expected)
    return 1 - k
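
# Worked check (added illustration, not from the original file): for the 2x2
# confusion matrix [[20, 5], [10, 15]], observed agreement is 35/50 = 0.7 and
# chance agreement is 0.5, so kappa_score returns (0.7 - 0.5) / (1 - 0.5) = 0.4.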


def evaluate(predictions, gts, num_classes):
    # axis 0: ground truth, axis 1: prediction
    hist = confusion_matrix(predictions, gts, num_classes)
    # Overall pixel accuracy.
    acc = np.diag(hist).sum() / hist.sum()
    # Mean per-class accuracy; nanmean skips classes absent from the ground truth.
    acc_cls = np.diag(hist) / hist.sum(axis=1)
    acc_cls = np.nanmean(acc_cls)
    # Per-class intersection over union and its mean.
    iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
    mean_iu = np.nanmean(iu)
    # Frequency-weighted IoU (classes weighted by ground-truth pixel frequency).
    freq = hist.sum(axis=1) / hist.sum()
    fwavacc = (freq[freq > 0] * iu[freq > 0]).sum()
    kappa = kappa_score(hist)
    return acc, acc_cls, mean_iu, iu, fwavacc, kappa
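

# Usage sketch (an added example, not part of the original file): run the
# metrics on a few random label maps. The array shapes and class count here
# are placeholders chosen for illustration.
if __name__ == "__main__":
    num_classes = 5
    rng = np.random.default_rng(0)
    preds = [rng.integers(0, num_classes, size=(64, 64)) for _ in range(4)]
    gts = [rng.integers(0, num_classes, size=(64, 64)) for _ in range(4)]
    acc, acc_cls, mean_iu, iu, fwavacc, kappa = evaluate(preds, gts, num_classes)
    print("overall acc:      ", acc)
    print("mean class acc:   ", acc_cls)
    print("mean IoU:         ", mean_iu)
    print("per-class IoU:    ", iu)
    print("freq-weighted acc:", fwavacc)
    print("kappa:            ", kappa)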