def kullback_leibler_divergence(weights=1.0, name='KullbackLeiberDivergence', scope=None,
                                collect=False):
    """Adds a Kullback-Leibler divergence loss to the training procedure.

    Args:
        weights: Coefficient(s) the built loss is scaled by. Defaults to 1.0.
        name: name of the op.
        scope: The scope for the operations performed in computing the loss.
        collect: add to losses collection.

    Returns:
        A scalar `Tensor` representing the loss value.

    Raises:
        ValueError: If the predictions' shape doesn't match the labels' shape,
            or `weights` is `None`.
    """
    def _kl_loss(y_true, y_pred):
        # Clip both distributions into [EPSILON, 1] so the log and the
        # division below stay finite.
        safe_true = clip(y_true, EPSILON, 1)
        safe_pred = clip(y_pred, EPSILON, 1)
        # KL(true || pred), summed over the last axis.
        return tf.reduce_sum(safe_true * tf.log(safe_true / safe_pred), axis=-1)

    return built_loss(_kl_loss, weights, name, scope, collect)