metrics.py
from keras import backend as K

# Small constant used to avoid division by zero in the Dice coefficient.
smooth = 1e-5

def precision(y_true, y_pred):
    """Fraction of predicted positive pixels that are actually positive."""
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    true_positives = K.sum(K.round(K.clip(y_true_f * y_pred_f, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred_f, 0, 1)))
    return true_positives / (predicted_positives + K.epsilon())

def recall(y_true, y_pred):
    """Fraction of ground-truth positive pixels that are predicted positive."""
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    true_positives = K.sum(K.round(K.clip(y_true_f * y_pred_f, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true_f, 0, 1)))
    return true_positives / (possible_positives + K.epsilon())

def f1_score(y_true, y_pred):
    """Harmonic mean of precision and recall."""
    p = precision(y_true, y_pred)
    r = recall(y_true, y_pred)
    # Equivalent to 2 / (1/p + 1/r), but safe when either value is zero.
    return 2. * p * r / (p + r + K.epsilon())

def dice_coef(y_true, y_pred):
    """Dice coefficient: 2*|X & Y| / (|X| + |Y|), smoothed to avoid 0/0."""
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)

def mean_iou(y_true, y_pred, smooth=None, axis=-1):
    """Mean intersection-over-union (Jaccard index) for semantic segmentation.

    This metric is useful when you have unbalanced numbers of pixels within an
    image because it gives all classes equal weight. However, it is not the
    de facto standard for image segmentation.

    For example, assume you are trying to predict whether each pixel is cat, dog,
    or background. You have 80% background pixels, 10% dog, and 10% cat. If the
    model predicts 100% background, should it be 80% right (as with categorical
    cross-entropy) or 30% right (as with this metric)?

    A smoothing term is added to the numerator and denominator so the value
    stays finite when a class is absent from both the prediction and the target.

    Also see jaccard, which takes a slightly different approach.

    Jaccard = |X & Y| / (|X| + |Y| - |X & Y|)
            = sum(|A*B|) / (sum(|A|) + sum(|B|) - sum(|A*B|))

    # References
    Csurka, Gabriela, Larlus, Diane & Perronnin, Florent (2013).
    What is a good evaluation measure for semantic segmentation?
    BMVC 2013. doi:10.5244/C.27.32
    https://en.wikipedia.org/wiki/Jaccard_index
    """
    if smooth is None:
        smooth = K.epsilon()
    pred_shape = K.shape(y_pred)
    true_shape = K.shape(y_true)
    # Collapse the batch, height and width dimensions so that each row is one
    # pixel and each column is one class.
    y_pred_reshaped = K.reshape(y_pred, (-1, pred_shape[-1]))
    y_true_reshaped = K.reshape(y_true, (-1, true_shape[-1]))
    # One-hot encode the predicted class of every pixel, then keep only the
    # entries where the prediction agrees with the (one-hot) ground truth.
    clf_pred = K.one_hot(K.argmax(y_pred_reshaped), num_classes=true_shape[-1])
    equal_entries = K.cast(K.equal(clf_pred, y_true_reshaped), dtype='float32') * y_true_reshaped
    # Sum over the pixel axis (axis 0) to get per-class intersection and union
    # totals, so every class contributes equally to the mean.
    intersection = K.sum(equal_entries, axis=0)
    union_per_class = K.sum(y_true_reshaped, axis=0) + K.sum(y_pred_reshaped, axis=0)
    # smooth added to avoid dividing by zero
    iou = (intersection + smooth) / ((union_per_class - intersection) + smooth)
    return K.mean(iou)
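

if __name__ == "__main__":
    # Illustrative usage sketch (not part of the original module), assuming a
    # TensorFlow backend: evaluate the metrics above on a tiny hand-made
    # "image" of 4 pixels and 3 classes. In practice these functions are
    # passed to model.compile(..., metrics=[precision, recall, f1_score,
    # dice_coef, mean_iou]).
    import numpy as np

    y_true = K.constant(np.array([[1, 0, 0],
                                  [0, 1, 0],
                                  [0, 0, 1],
                                  [1, 0, 0]], dtype='float32'))
    y_pred = K.constant(np.array([[0.9, 0.05, 0.05],
                                  [0.1, 0.8, 0.1],
                                  [0.2, 0.2, 0.6],
                                  [0.3, 0.4, 0.3]], dtype='float32'))

    print('precision :', K.eval(precision(y_true, y_pred)))
    print('recall    :', K.eval(recall(y_true, y_pred)))
    print('f1_score  :', K.eval(f1_score(y_true, y_pred)))
    print('dice_coef :', K.eval(dice_coef(y_true, y_pred)))
    print('mean_iou  :', K.eval(mean_iou(y_true, y_pred)))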