# (Requires: import numpy as np; import chainer; import chainer.functions as F)
def entropy(self):
    # Differential entropy of a diagonal Gaussian; per dimension it is
    #   0.5 * (log(2 * pi * var) + 1)
    #   = 0.5 * (log(2 * pi) + log var + 1),
    # so summing over the D dimensions gives
    #   0.5 * D * (log(2 * pi) + 1) + 0.5 * sum(log var).
    with chainer.force_backprop_mode():
        return (0.5 * self.mean.data.shape[1] * (np.log(2 * np.pi) + 1)
                + 0.5 * F.sum(self.ln_var, axis=1))
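# A minimal standalone sketch (not part of the original class) that checks the
# same formula numerically with plain NumPy. The helper name gaussian_entropy
# and the example values are assumptions for illustration only.
import numpy as np

def gaussian_entropy(ln_var):
    """Entropy of a diagonal Gaussian from per-dimension log-variances.

    ln_var: array of shape (batch_size, D)
    returns: array of shape (batch_size,)
    """
    d = ln_var.shape[1]
    return 0.5 * d * (np.log(2 * np.pi) + 1) + 0.5 * ln_var.sum(axis=1)

# Example: one 2-D Gaussian with variances (1.0, 4.0).
# Per-dimension entropies are ~1.419 and ~2.112, so the total is ~3.531.
ln_var = np.log(np.array([[1.0, 4.0]]))
print(gaussian_entropy(ln_var))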