def manual_loss(self, logits, targets, K):
    """Compute the Gaussian-mixture negative log-likelihood outside the graph.

    Args:
        logits: array of shape (batch, 3*K); per instance the first K
            columns are mixture-weight logits, the next K are log-sigmas,
            and the last K are the component means.
        targets: array of shape (batch,) with the scalar target per instance.
        K: number of mixture components.

    Returns:
        Scalar float: mean negative log-likelihood over the batch
        (averaged over ``self.batch_size`` instances).
    """
    mixs, sigmas, means = logits[:, 0:K], logits[:, K:2*K], logits[:, 2*K:]
    # BUGFIX: softmax must be applied row-wise (per instance), not over the
    # whole (batch, K) array. Subtract the row max first for numerical
    # stability before exponentiating.
    mixs = np.exp(mixs - np.max(mixs, axis=1, keepdims=True))
    mixs = mixs / np.sum(mixs, axis=1, keepdims=True)
    sigmas = np.exp(sigmas)  # log-sigma -> sigma (guarantees positivity)
    # Compute the mixture density per instance:
    #   p_i = sum_k pi_{ik} * N(targets_i; means_{ik}, sigmas_{ik})
    logexps = []
    for i in range(self.batch_size):
        # BUGFIX: use this instance's weight mixs[i, k] — the original
        # mixs[:, k] pulled component-k weights for the ENTIRE batch,
        # summing spurious cross-instance terms into sumexp.
        sumexp = np.sum(
            [
                mixs[i, k] *
                norm.pdf(targets[i], means[i, k], sigmas[i, k])
                for k in range(K)
            ]
        )
        logexps.append(np.log(sumexp))
    return -np.mean(logexps)
# NOTE(review): stray web-scrape artifact below ("评论列表" = "Comment list",
# "文章目录" = "Table of contents" — blog navigation text, not Python).
# Commented out so the file parses; safe to delete.
# 评论列表
# 文章目录