def encoder(x,y):
y_dim = int(y.get_shape().as_list()[-1])
# reshape so it's batchx1x1xy_size
y = tf.reshape(y, shape=[BATCH_SIZE, 1, 1, y_dim])
input_ = conv_cond_concat(x, y)
conv1 = tcl.conv2d(input_, 64, 4, 2, activation_fn=tf.identity, normalizer_fn=tcl.batch_norm, weights_initializer=tf.random_normal_initializer(stddev=0.02), scope='g_enc_conv1')
conv1 = lrelu(conv1)
conv1 = conv_cond_concat(conv1, y)
conv2 = tcl.conv2d(conv1, 128, 4, 2, activation_fn=tf.identity, normalizer_fn=tcl.batch_norm, weights_initializer=tf.random_normal_initializer(stddev=0.02), scope='g_enc_conv2')
conv2 = lrelu(conv2)
conv2 = conv_cond_concat(conv2, y)
conv3 = tcl.conv2d(conv2, 256, 4, 2, activation_fn=tf.identity, normalizer_fn=tcl.batch_norm, weights_initializer=tf.random_normal_initializer(stddev=0.02), scope='g_enc_conv3')
conv3 = lrelu(conv3)
conv3 = conv_cond_concat(conv3, y)
conv4 = tcl.conv2d(conv3, 512, 4, 2, activation_fn=tf.identity, normalizer_fn=tcl.batch_norm, weights_initializer=tf.random_normal_initializer(stddev=0.02), scope='g_enc_conv4')
conv4 = lrelu(conv4)
conv4 = conv_cond_concat(conv4, y)
conv5 = tcl.conv2d(conv4, 512, 4, 2, activation_fn=tf.identity, normalizer_fn=tcl.batch_norm, weights_initializer=tf.random_normal_initializer(stddev=0.02), scope='g_enc_conv5')
conv5 = lrelu(conv5)
conv5 = conv_cond_concat(conv5, y)
conv6 = tcl.conv2d(conv5, 512, 4, 2, activation_fn=tf.identity, normalizer_fn=tcl.batch_norm, weights_initializer=tf.random_normal_initializer(stddev=0.02), scope='g_enc_conv6')
conv6 = lrelu(conv6)
print 'conv1:',conv1
print 'conv2:',conv2
print 'conv3:',conv3
print 'conv4:',conv4
print 'conv5:',conv5
print 'conv6:',conv6
out = [conv1, conv2, conv3, conv4, conv5, conv6]
return out,y
# NOTE(review): removed stray web-page residue pasted below this function
# ("评论列表" = comment list, "文章目录" = table of contents) — it was not
# code and made the file a syntax error.