import tensorflow as tf

def transpose_conv2d(x, output_shape, kernel_h=5, kernel_w=5, activation=tf.nn.relu, stride=2,
                     padding="VALID", use_bn=True, is_train=True, stddv=0.02, name="transpose_conv2d"):
    # Input is NHWC; only the channel count is needed to size the kernel.
    n, h, w, c = x.get_shape().as_list()
    num_kernels = output_shape[-1]
    with tf.variable_scope(name):
        # Kernel layout for conv2d_transpose is (height, width, out_channels, in_channels).
        weight = tf.get_variable(name="weight", initializer=tf.truncated_normal_initializer(stddev=stddv),
                                 shape=(kernel_h, kernel_w, num_kernels, c))
        bias = tf.get_variable(name="bias", initializer=tf.constant_initializer(0.01), shape=(num_kernels,))
        y = tf.nn.conv2d_transpose(x, weight, output_shape=output_shape, padding=padding,
                                   strides=(1, stride, stride, 1))
        y = tf.nn.bias_add(y, bias)
        if use_bn:
            # batch_norm is the helper defined elsewhere in this post.
            y = batch_norm(y, tf.get_variable_scope().name, is_train)
        print("Transposed Convolutional 2D Layer %s, kernel size %s, output size %s Reuse:%s"
              % (tf.get_variable_scope().name, (kernel_h, kernel_w, c, num_kernels), y.get_shape().as_list(),
                 tf.get_variable_scope().reuse))
        return activation(y)
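A brief usage sketch may help. The batch size, feature-map shapes, and scope name below are illustrative assumptions, not values from the original code; use_bn is disabled so the snippet depends only on the function above rather than the external batch_norm helper. With padding="SAME" and stride 2, a 4x4 feature map is upsampled to 8x8.

# Hypothetical example: upsample a 4x4x256 feature map to 8x8x128.
# Shapes and the scope name are assumptions for illustration;
# use_bn=False avoids the external batch_norm dependency.
z_feat = tf.placeholder(tf.float32, shape=(64, 4, 4, 256), name="z_feat")
up1 = transpose_conv2d(z_feat, output_shape=(64, 8, 8, 128),
                       stride=2, padding="SAME", use_bn=False, name="g_deconv1")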