def generator(self, opts, noise, is_training, reuse=False):
    """Map latent noise to 28x28x1 images with a two-hidden-layer MLP.

    Architecture: two (linear -> batch norm -> softplus) blocks of width
    100, a final linear projection to 28*28, reshaped to image form.

    Args:
        opts: options dict; `opts['input_normalize_sym']` selects the
            output squashing — tanh (range [-1, 1]) when truthy,
            sigmoid (range [0, 1]) otherwise.
        noise: latent input tensor; assumes shape (batch, noise_dim) —
            TODO confirm against caller.
        is_training: training-mode flag forwarded to batch norm.
        reuse: whether to reuse variables in the "GENERATOR" scope.

    Returns:
        A tensor of shape (batch, 28, 28, 1).
    """
    with tf.variable_scope("GENERATOR", reuse=reuse):
        h = noise
        # Two identical hidden blocks; scope names preserve the original
        # checkpoint-compatible naming (h0_lin/h1_lin, bn_layer1/bn_layer2).
        for i in (1, 2):
            h = ops.linear(opts, h, 100, scope='h%d_lin' % (i - 1))
            h = ops.batch_norm(opts, h, is_training, reuse,
                               scope='bn_layer%d' % i, scale=False)
            h = tf.nn.softplus(h)
        h = ops.linear(opts, h, 28 * 28, scope='h2_lin')
        h = tf.reshape(h, [-1, 28, 28, 1])
        # Symmetric input normalization pairs with a symmetric output range.
        return tf.nn.tanh(h) if opts['input_normalize_sym'] else tf.nn.sigmoid(h)