# losses.py
from keras.losses import binary_crossentropy
import keras.backend as K
from matplotlib.pyplot import imsave
from skimage.measure import label
from scipy.ndimage import distance_transform_edt  # scipy.ndimage.morphology is deprecated
import numpy as np


def weight_map(y_true, wc=None, w0=1, sigma=12):
    """Per-pixel weight map in the style of the U-Net paper: background pixels
    in the gaps between touching objects get large weights, optionally combined
    with per-class weights given in ``wc`` (dict mapping class -> weight)."""
    y = y_true.astype(bool)
    labels = label(y)
    no_labels = labels == 0
    label_ids = sorted(np.unique(labels))[1:]
    if len(label_ids) > 1:
        # Distance from every pixel to each labelled object.
        distances = np.zeros((y.shape[0], y.shape[1], len(label_ids)))
        for i, label_id in enumerate(label_ids):
            distances[:, :, i] = distance_transform_edt(labels != label_id)
        distances = np.sort(distances, axis=2)
        d1 = distances[:, :, 0]  # distance to the nearest object
        d2 = distances[:, :, 1]  # distance to the second-nearest object
        # Border weight, applied only to background pixels.
        w = w0 * np.exp(-1 / 2 * ((d1 + d2) / sigma) ** 2) * no_labels
        if wc:
            # Use a float array; np.zeros_like(y) would be boolean here and
            # silently truncate the class weights.
            class_weights = np.zeros(y.shape, dtype=np.float64)
            for k, v in wc.items():
                class_weights[y == k] = v
            w = w + class_weights
    else:
        w = np.zeros_like(y, dtype=np.float64)
    return w
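
# Minimal usage sketch (illustrative only; shapes, classes and weights are made up):
# two separated squares produce large weights in the gap between them.
#
#   mask = np.zeros((64, 64), dtype=np.uint8)
#   mask[10:30, 10:30] = 1
#   mask[10:30, 34:54] = 1
#   w = weight_map(mask, wc={0: 1.0, 1: 5.0}, w0=10, sigma=5)
#   print(w.shape, w.max())   # (64, 64); the largest values lie between the two squares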


def iou_score(y_true, y_pred, class_weights=1., smooth=1e-12, per_image=True):
    # https://github.com/qubvel/segmentation_models/blob/master/segmentation_models/metrics.py
    r"""The `Jaccard index`_, also known as Intersection over Union and the Jaccard similarity
    coefficient (originally coined *coefficient de communauté* by Paul Jaccard), is a statistic
    used for comparing the similarity and diversity of sample sets. The Jaccard coefficient
    measures similarity between finite sample sets, and is defined as the size of the
    intersection divided by the size of the union of the sample sets:

    .. math:: J(A, B) = \frac{|A \cap B|}{|A \cup B|}

    Args:
        y_true: ground truth 4D Keras tensor (B, H, W, C)
        y_pred: prediction 4D Keras tensor (B, H, W, C)
        class_weights: 1. or list of class weights, len(weights) = C
        smooth: value added to avoid division by zero
        per_image: if ``True``, the metric is calculated as the mean over images
            in the batch (B), else over the whole batch

    Returns:
        IoU/Jaccard score in range [0, 1]

    .. _`Jaccard index`: https://en.wikipedia.org/wiki/Jaccard_index
    """
    if per_image:
        axes = [1, 2]
    else:
        axes = [0, 1, 2]
    intersection = K.sum(y_true * y_pred, axis=axes)
    union = K.sum(y_true + y_pred, axis=axes) - intersection
    iou = (intersection + smooth) / (union + smooth)
    # mean per image
    if per_image:
        iou = K.mean(iou, axis=0)
    # weighted mean per class
    iou = K.mean(iou * class_weights)
    return iou
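
# Quick sanity-check sketch (illustrative; values chosen by hand):
#
#   y_t = K.constant(np.ones((1, 4, 4, 1), dtype=np.float32))
#   y_p = K.constant(np.ones((1, 4, 4, 1), dtype=np.float32))
#   K.eval(iou_score(y_t, y_p))   # ~1.0 for a perfect prediction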


def iou(y_true, y_pred, smooth=1):
    intersection = K.sum(K.abs(y_true * y_pred), axis=-1)
    union = K.sum(y_true, -1) + K.sum(y_pred, -1) - intersection
    iou = (intersection + smooth) / (union + smooth)
    return iou


def iou_loss(y_true, y_pred):
    # Minimising the negative IoU maximises the IoU score.
    return -iou_score(y_true, y_pred)


def dice_coeff(y_true, y_pred):
    smooth = 1.
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    score = (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
    return score  # scalar
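
# Worked example (illustrative): identical masks give a coefficient of 1.
#
#   y_t = K.constant(np.array([[0., 1., 1., 0.]]))
#   K.eval(dice_coeff(y_t, y_t))   # (2*2 + 1) / (2 + 2 + 1) = 1.0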


def bce_loss(y_true, y_pred):
    # From: https://github.com/keras-team/keras/blob/master/keras/losses.py
    return K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1)


def dice_loss(y_true, y_pred):
    loss = 1 - dice_coeff(y_true, y_pred)
    return loss


def bce_dice_weighted_loss(y_true, y_pred):
    # TODO: add a per-pixel weight map to the binary cross-entropy term.
    # weight_map() operates on NumPy arrays, so it cannot be applied directly
    # to the symbolic tensors Keras passes in here; the original attempt and
    # its debugging output are kept below for reference but are not used.
    # loss = bce_dice_loss(y_true, y_pred) * weight_map(y_true)
    # print(loss.shape)
    # imsave("weighted_loss_test.png", loss)
    return bce_dice_loss(y_true, y_pred)


def bce_dice_loss(y_true, y_pred):
    # binary_crossentropy returns a per-pixel tensor; adding the scalar Dice
    # loss broadcasts over that tensor, and Keras averages the result.
    loss = binary_crossentropy(y_true, y_pred) + dice_loss(y_true, y_pred)
    return loss
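
# Typical usage sketch (assumes an already-built Keras segmentation model
# named `model` with sigmoid outputs; the metric choice is illustrative):
#
#   model.compile(optimizer='adam', loss=bce_dice_loss, metrics=[dice_coeff, iou_score])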


def weighted_dice_coeff(y_true, y_pred, weight):
    smooth = 1.
    w, m1, m2 = weight * weight, y_true, y_pred
    intersection = (m1 * m2)
    score = (2. * K.sum(w * intersection) + smooth) / (K.sum(w * m1) + K.sum(w * m2) + smooth)
    return score


def weighted_dice_loss(y_true, y_pred):
    y_true = K.cast(y_true, 'float32')
    y_pred = K.cast(y_pred, 'float32')
    # To keep the pooled output the same size as the input, the kernel size
    # must be an odd number.
    if K.int_shape(y_pred)[1] == 128:
        kernel_size = 11
    elif K.int_shape(y_pred)[1] == 256:
        kernel_size = 21
    elif K.int_shape(y_pred)[1] == 512:
        kernel_size = 21
    elif K.int_shape(y_pred)[1] == 1024:
        kernel_size = 41
    else:
        raise ValueError('Unexpected image size')
    # Average pooling blurs the mask; pixels whose local average is strictly
    # between 0 and 1 lie near object borders.
    averaged_mask = K.pool2d(
        y_true, pool_size=(kernel_size, kernel_size), strides=(1, 1), padding='same', pool_mode='avg')
    border = K.cast(K.greater(averaged_mask, 0.005), 'float32') * K.cast(K.less(averaged_mask, 0.995), 'float32')
    # Weight border pixels 3x, then rescale so the total weight is unchanged.
    weight = K.ones_like(averaged_mask)
    w0 = K.sum(weight)
    weight += border * 2
    w1 = K.sum(weight)
    weight *= (w0 / w1)
    loss = 1 - weighted_dice_coeff(y_true, y_pred, weight)
    return loss


def weighted_bce_loss(y_true, y_pred, weight):
    # Clip to avoid overflow when converting probabilities back to logits.
    epsilon = 1e-7
    y_pred = K.clip(y_pred, epsilon, 1. - epsilon)
    logit_y_pred = K.log(y_pred / (1. - y_pred))
    # Numerically stable form of weighted cross-entropy with logits, see
    # https://www.tensorflow.org/api_docs/python/tf/nn/weighted_cross_entropy_with_logits
    loss = (1. - y_true) * logit_y_pred + (1. + (weight - 1.) * y_true) * \
        (K.log(1. + K.exp(-K.abs(logit_y_pred))) + K.maximum(-logit_y_pred, 0.))
    return K.sum(loss) / K.sum(weight)
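
# Why the formula above is written this way (sketch, with x = logits, z = targets,
# w = weight, following the TensorFlow docs linked above):
#   weighted BCE = (1 - z) * x + (1 + (w - 1) * z) * log(1 + exp(-x))
# and log(1 + exp(-x)) is rewritten as log(1 + exp(-|x|)) + max(-x, 0) so the
# exponential never overflows for large negative x.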


def weighted_bce_dice_loss(y_true, y_pred):
    y_true = K.cast(y_true, 'float32')
    y_pred = K.cast(y_pred, 'float32')
    # To keep the pooled output the same size as the input, the kernel size
    # must be an odd number.
    if K.int_shape(y_pred)[1] == 128:
        kernel_size = 11
    elif K.int_shape(y_pred)[1] == 256:
        kernel_size = 21
    elif K.int_shape(y_pred)[1] == 512:
        kernel_size = 21
    elif K.int_shape(y_pred)[1] == 1024:
        kernel_size = 41
    else:
        raise ValueError('Unexpected image size')
    # Same border weighting as in weighted_dice_loss above.
    averaged_mask = K.pool2d(
        y_true, pool_size=(kernel_size, kernel_size), strides=(1, 1), padding='same', pool_mode='avg')
    border = K.cast(K.greater(averaged_mask, 0.005), 'float32') * K.cast(K.less(averaged_mask, 0.995), 'float32')
    weight = K.ones_like(averaged_mask)
    w0 = K.sum(weight)
    weight += border * 2
    w1 = K.sum(weight)
    weight *= (w0 / w1)
    loss = weighted_bce_loss(y_true, y_pred, weight) + (1 - weighted_dice_coeff(y_true, y_pred, weight))
    return loss
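
# Usage sketch (illustrative): the weighted losses derive the border weight
# map from y_true on the fly, so they plug into compile() like any other loss.
# The input size must be one of 128/256/512/1024 (see the kernel_size lookup).
#
#   model.compile(optimizer='adam', loss=weighted_bce_dice_loss, metrics=[dice_coeff])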