def binary_crossentropy(logit, y, mask, length_var):
    """Masked, length-normalized binary cross-entropy over sequences.

    Parameters
    ----------
    logit : symbolic tensor
        Raw pre-sigmoid scores; assumed (n_samples, n_timesteps_f, 1),
        i.e. a single binary label per timestep — TODO confirm with caller.
    y : symbolic tensor
        Binary targets, flattened to match the flattened logits.
    mask : symbolic tensor, (n_samples, n_timesteps_f)
        1 for valid timesteps, 0 for padding.
    length_var : symbolic vector, (n_samples,)
        True sequence lengths used to average each sample's summed loss.

    Returns
    -------
    tuple
        (per-sample mean loss vector, predicted probabilities reshaped
        to (n_samples, n_timesteps_f)).
    """
    shp = logit.shape
    # Sigmoid (not softmax): there is only one label, so each timestep
    # gets an independent Bernoulli probability.
    probs = T.nnet.sigmoid(logit.flatten())
    # Element-wise cross-entropy over the flattened (sample, timestep) grid.
    xent = lasagne.objectives.binary_crossentropy(probs, y.flatten())
    # Restore (n_samples, n_timesteps_f), zero out padded timesteps,
    # then average each row by its true length.
    xent = xent.reshape((shp[0], shp[1])) * mask
    loss = T.sum(xent, axis=1) / length_var
    probs = probs.reshape([shp[0], shp[1]])
    return loss, probs