def add_loss_op(self, preds):
    """Adds Ops for the loss function to the computational graph.

    Builds the averaged cross-entropy loss over the unmasked tokens:
    per-token losses are computed from the raw logits, entries that
    correspond to masked (padding) positions are dropped via
    tf.boolean_mask, and the surviving losses are averaged into a
    single scalar.

    Args:
        preds: A tensor of shape (batch_size, max_length, n_classes)
            containing the output of the neural network before the
            softmax layer.
    Returns:
        loss: A 0-d tensor (scalar).
    """
    # Per-token cross-entropy straight from the logits; the op applies
    # softmax internally, so `preds` must be pre-softmax values.
    per_token_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=self.labels_placeholder, logits=preds)
    # Keep only the losses at positions the mask marks as real tokens,
    # so padding never contributes to the objective.
    unmasked_loss = tf.boolean_mask(per_token_loss, self.mask_placeholder)
    loss = tf.reduce_mean(unmasked_loss)
    return loss
# NOTE(review): the two lines below ("评论列表" = comment list, "文章目录" =
# article table of contents) are stray web-page navigation text left over from
# the blog page this snippet was copied from; commented out so the file
# remains valid Python.
# 评论列表
# 文章目录