forked from akirasosa/mobile-semantic-segmentation
loss.py
import numpy as np
from keras import backend as K

# Small constant to avoid division by zero in the dice coefficient.
smooth = 1e-5


def precision(y_true, y_pred):
    """Precision: true positives / predicted positives."""
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    true_positives = K.sum(K.round(K.clip(y_true_f * y_pred_f, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred_f, 0, 1)))
    return true_positives / (predicted_positives + K.epsilon())


def recall(y_true, y_pred):
    """Recall: true positives / actual positives."""
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    true_positives = K.sum(K.round(K.clip(y_true_f * y_pred_f, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true_f, 0, 1)))
    return true_positives / (possible_positives + K.epsilon())


def f1_score(y_true, y_pred):
    """F1 score as the harmonic mean of precision and recall."""
    return 2. / (1. / recall(y_true, y_pred) + 1. / precision(y_true, y_pred))


def dice_coef(y_true, y_pred):
    """Soft dice coefficient, smoothed to avoid division by zero."""
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)


def dice_coef_loss(y_true, y_pred):
    """Loss to minimize: the negative of the dice coefficient."""
    return -dice_coef(y_true, y_pred)


def np_dice_coef(y_true, y_pred):
    """NumPy version of the dice coefficient, for evaluation outside the graph."""
    tr = y_true.flatten()
    pr = y_pred.flatten()
    return (2. * np.sum(tr * pr) + smooth) / (np.sum(tr) + np.sum(pr) + smooth)
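

# --- Usage sketch (not part of the original file) ---
# A minimal sanity check of the NumPy dice coefficient, plus a hedged example of
# how these functions would typically be wired into a Keras model. The `model`
# object and optimizer choice below are assumptions for illustration only; they
# are not taken from the original repository.
if __name__ == '__main__':
    y_true = np.array([[0., 1., 1., 0.]])
    y_pred = np.array([[0., 0.9, 0.8, 0.1]])
    # Soft dice: (2 * (0.9 + 0.8) + smooth) / (2 + 1.8 + smooth) ~= 0.895
    print('np dice:', np_dice_coef(y_true, y_pred))

    # Typical compile call (assumes `model` is an existing Keras model):
    # model.compile(optimizer='adam',
    #               loss=dice_coef_loss,
    #               metrics=[dice_coef, precision, recall, f1_score])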