def discriminator_cramer_test(self, opts, input_):
    """Deterministic discriminator based on the Cramer-von Mises test.

    Compares the empirical CDF of the (optionally randomly projected)
    samples in ``input_`` with the CDF of a zero-mean Gaussian whose
    std is ``opts['pot_pz_std']``, and returns the mean (over
    dimensions) squared discrepancy as a scalar test statistic.

    Args:
        opts: options dict; reads 'z_test_proj_dim' and 'pot_pz_std'.
        input_: 2-D tensor of shape [batch_size, z_dim].
            NOTE(review): assumed float32 — confirm with callers.

    Returns:
        Scalar tensor holding the averaged CvM statistic.
    """
    extra_dims = opts['z_test_proj_dim']
    if extra_dims > 0:
        # Augment the input with `extra_dims` random centered, unit-norm
        # projections of itself so mixed directions are tested as well.
        z_dim = int(input_.get_shape()[1])
        directions = np.random.rand(z_dim, extra_dims)
        directions = directions - np.mean(directions, 0)
        col_norms = np.sqrt(np.sum(np.square(directions), 0) + 1e-5)
        directions = tf.constant(directions / col_norms, dtype=tf.float32)
        projections = tf.matmul(input_, directions)  # [batch_size, extra_dims]
        # Shape [batch_size, z_dim + extra_dims].
        samples = tf.concat([input_, projections], 1)
    else:
        samples = input_
    batch_size = self.get_batch_size(opts, samples)
    # tf.nn.top_k only sorts along the last axis, but each *column*
    # (the batch axis) must be sorted — transpose first.
    samples_t = tf.transpose(samples, perm=[1, 0])
    sorted_desc, _ = tf.nn.top_k(samples_t, k=tf.cast(batch_size, tf.int32))
    # top_k yields descending order; reverse each row to get ascending.
    sorted_asc = tf.reverse(sorted_desc, [1])
    prior = tf.contrib.distributions.Normal(0., float(opts['pot_pz_std']))
    prior_cdf = prior.cdf(sorted_asc)
    # Theoretical CDF positions (2i - 1) / (2N) for i = 1..N.
    expected = (2 * tf.range(1, batch_size + 1, 1, dtype="float") - 1) / (2.0 * batch_size)
    # The additive constant 1 / (12 N^2) of the CvM statistic is dropped:
    # it does not depend on the samples, so it cannot affect training.
    per_dim_stat = tf.reduce_sum(tf.square(expected - prior_cdf), 1) / batch_size
    return tf.reduce_mean(per_dim_stat)