layers.py source code


Project: vae-npvc  Author: JeremyCCHsu
import tensorflow as tf

# Small constant keeping the division below numerically safe; the exact value
# is defined elsewhere in the repo (1e-6 is an assumed stand-in here).
EPSILON = 1e-6

def GaussianKLD(mu1, lv1, mu2, lv2):
    ''' Kullback-Leibler divergence KL(N1 || N2) of two Gaussians.
        Assumes each dimension is independent (diagonal covariance).
        mu: mean
        lv: log variance
        Equation: http://stats.stackexchange.com/questions/7440/kl-divergence-between-two-univariate-gaussians
    '''
    with tf.name_scope('GaussianKLD'):
        v1 = tf.exp(lv1)
        v2 = tf.exp(lv2)
        mu_diff_sq = tf.square(mu1 - mu2)
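        # Per-dimension closed form: 0.5 * (log(v2/v1) + (v1 + (mu1-mu2)^2) / v2 - 1)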
        dimwise_kld = .5 * (
            (lv2 - lv1) + tf.div(v1 + mu_diff_sq, v2 + EPSILON) - 1.)
        return tf.reduce_sum(dimwise_kld, -1)
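
A typical use is the VAE regularizer pulling the encoder posterior toward a standard-normal prior, i.e. mu2 = 0 and lv2 = 0. A minimal usage sketch in the same TF1 graph style (the names z_mu/z_lv and the latent size are illustrative assumptions, not taken from the repo):

z_mu = tf.placeholder(tf.float32, [None, 16])   # posterior mean
z_lv = tf.placeholder(tf.float32, [None, 16])   # posterior log variance
kl = GaussianKLD(
    mu1=z_mu, lv1=z_lv,
    mu2=tf.zeros_like(z_mu),    # prior mean = 0
    lv2=tf.zeros_like(z_lv))    # prior log variance = 0, i.e. unit variance
kl_loss = tf.reduce_mean(kl)    # average per-example KL over the batch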

# Verification by CMU's implementation
# http://www.cs.cmu.edu/~chanwook/MySoftware/rm1_Spk-by-Spk_MLLR/rm1_PNCC_MLLR_1/rm1/python/sphinx/divergence.py
# def gau_kl(pm, pv, qm, qv):
#     """
#     Kullback-Leibler divergence from Gaussian pm,pv to Gaussian qm,qv.
#     Also computes KL divergence from a single Gaussian pm,pv to a set
#     of Gaussians qm,qv.
#     Diagonal covariances are assumed.  Divergence is expressed in nats.
#     """
#     if (len(qm.shape) == 2):
#         axis = 1
#     else:
#         axis = 0
#     # Determinants of diagonal covariances pv, qv
#     dpv = pv.prod()
#     dqv = qv.prod(axis)
#     # Inverse of diagonal covariance qv
#     iqv = 1./qv
#     # Difference between means pm, qm
#     diff = qm - pm
#     return (0.5 *
#             (np.log(dqv / dpv)            # log |\Sigma_q| / |\Sigma_p|
#              + (iqv * pv).sum(axis)          # + tr(\Sigma_q^{-1} * \Sigma_p)
#              + (diff * iqv * diff).sum(axis) # + (\mu_q-\mu_p)^T\Sigma_q^{-1}(\mu_q-\mu_p)
#              - len(pm)))                     # - N
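
A quick numeric cross-check against that reference can be sketched as follows (TF1-style session execution assumed; gau_kl_np is just the commented-out gau_kl transcribed for batched rows):

import numpy as np

def gau_kl_np(pm, pv, qm, qv):
    # NumPy transcription of gau_kl above, row-wise over a batch.
    iqv = 1. / qv
    diff = qm - pm
    return 0.5 * (np.log(qv.prod(-1) / pv.prod(-1))   # log |Sq| / |Sp|
                  + (iqv * pv).sum(-1)                 # tr(Sq^{-1} Sp)
                  + (diff * iqv * diff).sum(-1)        # Mahalanobis term
                  - pm.shape[-1])                      # - N

mu1, mu2 = np.random.randn(4, 8), np.random.randn(4, 8)
lv1, lv2 = np.random.uniform(-1., 1., (4, 8)), np.random.uniform(-1., 1., (4, 8))

with tf.Session() as sess:
    tf_kl = sess.run(GaussianKLD(mu1, lv1, mu2, lv2))
np_kl = gau_kl_np(mu1, np.exp(lv1), mu2, np.exp(lv2))
print(np.allclose(tf_kl, np_kl, atol=1e-3))  # True up to the EPSILON term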