import theano.tensor as T

# Assumed module-level constants (the original code defines these elsewhere)
_FLOATX = 'float32'   # Theano's default float type
_EPSILON = 1e-7       # small value to keep log() finite

def crossentropy(y_pred, y_true, void_labels, one_hot=False):
    """Categorical cross-entropy that ignores pixels labelled with a void class.

    y_pred: (n_pixels, n_classes) softmax probabilities.
    y_true: (n_pixels,) integer labels, or one-hot encoded if one_hot=True.
    void_labels: iterable of label values to exclude from the loss.
    """
    # Clip predictions away from 0 and 1 so log() stays finite
    y_pred = T.clip(y_pred, _EPSILON, 1.0 - _EPSILON)
    if one_hot:
        y_true = T.argmax(y_true, axis=1)
    # Build a mask that is 0 at void pixels and 1 elsewhere
    mask = T.ones_like(y_true, dtype=_FLOATX)
    for el in void_labels:
        mask = T.set_subtensor(mask[T.eq(y_true, el).nonzero()], 0.)
    # Temporarily map void labels to class 0 so the cross-entropy op accepts them;
    # their contribution is removed by the mask below
    y_true_tmp = y_true * mask
    y_true_tmp = y_true_tmp.astype('int32')
    # Per-pixel cross-entropy
    loss = T.nnet.categorical_crossentropy(y_pred, y_true_tmp)
    # Zero out void pixels and average over the non-void ones
    loss *= mask
    loss = T.sum(loss) / T.sum(mask)
    return loss
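
A minimal usage sketch, assuming the predictions have already been flattened to a (n_pixels, n_classes) softmax output and the targets to a vector of integer labels; the shapes, label values, and the choice of class 2 as the void label are illustrative only:

import numpy as np
import theano

# Symbolic inputs: flattened class probabilities and integer labels
y_pred_sym = T.matrix('y_pred')   # (n_pixels, n_classes)
y_true_sym = T.ivector('y_true')  # (n_pixels,)

# Treat label 2 as void: it is excluded from the averaged loss
loss_sym = crossentropy(y_pred_sym, y_true_sym, void_labels=[2])
loss_fn = theano.function([y_pred_sym, y_true_sym], loss_sym)

probs = np.array([[0.7, 0.2, 0.1],
                  [0.1, 0.8, 0.1],
                  [0.3, 0.3, 0.4]], dtype='float32')
labels = np.array([0, 1, 2], dtype='int32')  # last pixel is void
print(loss_fn(probs, labels))  # mean over the two non-void pixels, ~0.29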