import tensorflow as tf


def conv2d(x, num_kernels, kernel_h=5, kernel_w=5, strides=2, padding="VALID", name="conv2d",
           use_bn=True, activation=tf.nn.relu, alpha=None, is_train=True, stddv=0.02):
    """
    Wrapper function for a 2D convolutional layer (TensorFlow 1.x).
    """
    # Only the input channel count is needed to shape the kernel.
    _, _, _, c = x.get_shape().as_list()
    with tf.variable_scope(name):
        weight = tf.get_variable(name="weight", initializer=tf.truncated_normal_initializer(stddev=stddv),
                                 shape=(kernel_h, kernel_w, c, num_kernels))
        bias = tf.get_variable(name="bias", initializer=tf.constant_initializer(0.01), shape=[num_kernels])
        y = tf.nn.conv2d(x, weight, (1, strides, strides, 1), padding)
        y = tf.nn.bias_add(y, bias)
        if use_bn:
            y = batch_norm(y, tf.get_variable_scope().name, is_train)
        print("Convolutional 2D Layer %s, kernel size %s, output size %s Reuse:%s"
              % (tf.get_variable_scope().name, (kernel_h, kernel_w, c, num_kernels), y.get_shape().as_list(),
                 tf.get_variable_scope().reuse))
        # Pass alpha through for parameterized activations such as leaky ReLU.
        if alpha is None:
            y = activation(y)
        else:
            y = activation(y, alpha)
        return y
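A minimal usage sketch of the wrapper, assuming TensorFlow 1.x. The input placeholder shape, layer names, and the choice of tf.nn.leaky_relu are illustrative only, and use_bn is set to False here so the snippet does not depend on the batch_norm helper defined elsewhere in this post:

# Hypothetical 64x64 RGB input batch.
x = tf.placeholder(tf.float32, shape=(None, 64, 64, 3), name="images")
# First layer: leaky ReLU, so alpha is forwarded as the activation's second argument.
h1 = conv2d(x, num_kernels=64, name="conv1", use_bn=False,
            activation=tf.nn.leaky_relu, alpha=0.2)
# Second layer: default ReLU activation.
h2 = conv2d(h1, num_kernels=128, name="conv2", use_bn=False)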