def tf_parameterize(self, x):
    """Compute Beta-distribution parameters from the raw network output.

    Args:
        x: Network output tensor feeding the `alpha` and `beta` layers
           (flat per-batch features; exact shape depends on the layers —
           assumed compatible with a reshape to ``(-1,) + self.shape``).

    Returns:
        Tuple ``(alpha, beta, alpha_beta, log_norm)`` where `alpha` and
        `beta` are the distribution parameters (each >= 1), `alpha_beta`
        is their sum (floored at `util.epsilon`), and `log_norm` is the
        log of the Beta function B(alpha, beta).
    """
    # epsilon < 1.0, hence log(epsilon) is negative; it defines a
    # symmetric clipping range for the raw layer outputs.
    log_eps = log(util.epsilon)

    def shifted_softplus(raw):
        # Clip for numerical stability, then softplus(raw) + 1.0 so the
        # resulting parameter is >= 1 (same transform for alpha and beta).
        clipped = tf.clip_by_value(t=raw, clip_value_min=log_eps, clip_value_max=-log_eps)
        return tf.log(x=(tf.exp(x=clipped) + 1.0)) + 1.0

    alpha = shifted_softplus(self.alpha.apply(x=x))
    beta = shifted_softplus(self.beta.apply(x=x))

    # Reshape the flat layer outputs to (batch,) + action shape.
    # NOTE(review): assumes self.shape is the per-action shape tuple — confirm.
    shape = (-1,) + self.shape
    alpha = tf.reshape(tensor=alpha, shape=shape)
    beta = tf.reshape(tensor=beta, shape=shape)

    # Floor the sum at epsilon so lgamma below never sees a degenerate value.
    alpha_beta = tf.maximum(x=(alpha + beta), y=util.epsilon)
    # log B(alpha, beta) = lgamma(a) + lgamma(b) - lgamma(a + b).
    log_norm = tf.lgamma(x=alpha) + tf.lgamma(x=beta) - tf.lgamma(x=alpha_beta)
    return alpha, beta, alpha_beta, log_norm