-
Notifications
You must be signed in to change notification settings - Fork 18
/
evaluation_metrics.py
33 lines (26 loc) · 1.1 KB
/
evaluation_metrics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
from tensorflow.keras import backend as K
from sklearn.metrics import jaccard_score,confusion_matrix
def IoU_coef(y_true, y_pred):
    """Smoothed intersection-over-union between two (soft) binary masks.

    Both tensors are flattened; the +1.0 smoothing term keeps the ratio
    defined when both masks are empty and softens gradients near zero.
    """
    smooth = 1.0
    truth = K.flatten(y_true)
    pred = K.flatten(y_pred)
    overlap = K.sum(truth * pred)
    union = K.sum(truth) + K.sum(pred) - overlap
    return (overlap + smooth) / (union + smooth)
def IoU_loss(y_true, y_pred):
    """Negated IoU coefficient, so minimizing the loss maximizes overlap."""
    score = IoU_coef(y_true, y_pred)
    return -score
def dice_coef(y_true, y_pred):
    """Smoothed Dice coefficient (F1 over pixels) between two binary masks.

    Dice = 2*|A∩B| / (|A| + |B|); the +1.0 term avoids division by zero
    when both masks are empty.
    """
    smooth = 1.0
    truth = K.flatten(y_true)
    pred = K.flatten(y_pred)
    overlap = K.sum(truth * pred)
    return (2.0 * overlap + smooth) / (K.sum(truth) + K.sum(pred) + smooth)
def dice_coef_loss(y_true, y_pred):
    """Negated Dice coefficient, so minimizing the loss maximizes overlap."""
    score = dice_coef(y_true, y_pred)
    return -score
def accuracy(y_true, y_pred):
    """Pixel accuracy for binary masks: (TN + TP) / total pixels.

    Computed from the 2x2 confusion matrix with a fixed label order,
    so the result is stable even if one class is absent from the input.
    """
    # With labels=[0, 1], ravel() yields (TN, FP, FN, TP).
    tn, fp, fn, tp = confusion_matrix(
        y_true.flatten(), y_pred.flatten(), labels=[0, 1]
    ).ravel()
    return (tn + tp) / (tn + fp + fn + tp)
def IoU(y_true, y_pred, labels=(0, 1)):
    """Mean per-class Jaccard index (IoU) over the given labels.

    Parameters
    ----------
    y_true, y_pred : array-like
        Ground-truth and predicted masks; flattened before scoring.
    labels : iterable of int, default (0, 1)
        Class labels to score; each is treated as the positive class in turn.

    Returns
    -------
    float
        The unweighted mean of the per-class Jaccard scores.

    Fixes vs. the original:
    - `np.mean` was called without numpy being imported (NameError at
      runtime); the mean is now plain Python.
    - Arguments were swapped — sklearn's signature is
      jaccard_score(y_true, y_pred), and with weighted averaging the
      order affects the result.
    - `pos_label` is ignored when average='weighted', so every loop
      iteration produced the identical score; average='binary' honors
      pos_label so each class's IoU is actually computed.
    """
    truth = y_true.flatten()
    pred = y_pred.flatten()
    scores = [
        jaccard_score(truth, pred, pos_label=label, average='binary')
        for label in labels
    ]
    return sum(scores) / len(scores)