def _generator(self, z, y, is_training):
    '''Decode latent code `z` (merged with label `y`) into an image tensor.

    In this version only the target is generated, so `y` carries no real
    information here (its embedding lookup is disabled).

    Args:
        z: latent code tensor.
        y: label tensor (unused semantically; still merged into the input).
        is_training: bool tensor/flag forwarded to batch norm.

    Returns:
        A `(sigmoid(logit), logit)` pair: the activated image and raw logits.
    '''
    cfg = self.arch['generator']
    outputs, kernels, strides = cfg['output'], cfg['kernel'], cfg['stride']
    height, width, channels = cfg['hwc']

    # Fuse latent code and label, then apply the leaky-ReLU non-linearity.
    net = lrelu(self._merge([z, y], cfg['merge_dim']))

    with slim.arg_scope(
            [slim.batch_norm],
            scale=True, scope='BN',
            updates_collections=None,
            is_training=is_training):
        # Project up to the initial spatial volume, then reshape to NHWC.
        net = slim.fully_connected(
            net,
            height * width * channels,
            normalizer_fn=slim.batch_norm,
            activation_fn=lrelu)
        net = tf.reshape(net, [-1, height, width, channels])
        with slim.arg_scope(
                [slim.conv2d_transpose],
                weights_regularizer=slim.l2_regularizer(cfg['l2-reg']),
                normalizer_fn=slim.batch_norm,
                activation_fn=lrelu):
            # All but the last layer: transposed conv + BN + lrelu
            # (picked up from the surrounding arg_scope).
            for layer in range(len(outputs) - 1):
                net = slim.conv2d_transpose(
                    net,
                    outputs[layer],
                    kernels[layer],
                    strides[layer])
            # Last layer of G: plain transposed conv — no BN, no activation —
            # so the raw logits can be returned alongside the sigmoid output.
            logit = slim.conv2d_transpose(
                net,
                outputs[-1],
                kernels[-1],
                strides[-1],
                normalizer_fn=None,
                activation_fn=None)
    return tf.nn.sigmoid(logit), logit
# --- Page metadata scraped along with this listing (translated from Chinese) ---
# gvae.py file source | python | views: 21 | bookmarks: 0 | likes: 0
# comments: 0 | comment list | table of contents