# TensorFlow 1.x API (tf.variable_scope / tf.get_variable)
import tensorflow as tf


def dense_layer(x, num_neurons, name, activation, use_bn=False, is_train=True, stddv=0.02):
    # Determine the flattened feature size: a 4-D (NHWC) input is collapsed to h*w*c,
    # otherwise the input is assumed to already be (batch, features).
    if len(x.get_shape().as_list()) > 2:
        n, h, w, c = x.get_shape().as_list()
        d = h * w * c
    else:
        n, d = x.get_shape().as_list()
    with tf.variable_scope(name):
        # Flatten x to (batch, d) before the matrix multiply.
        x = tf.reshape(x, (-1, d))
        # Weight and bias variables of the fully connected layer.
        w = tf.get_variable("weight", shape=(d, num_neurons),
                            initializer=tf.random_normal_initializer(stddev=stddv))
        b = tf.get_variable("bias", shape=num_neurons, initializer=tf.constant_initializer(0.01))
        y = tf.matmul(x, w) + b
        if use_bn:
            # batch_norm is assumed to be a helper defined elsewhere in this code base.
            y = batch_norm(y, name=tf.get_variable_scope().name, is_train=is_train)
        print("Dense Layer %s, output size %s" % (tf.get_variable_scope().name, y.get_shape().as_list()))
        return activation(y)
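A minimal usage sketch, assuming TensorFlow 1.x graph mode; the placeholder shape, layer sizes, and ReLU activation below are illustrative, not from the original:

# Hypothetical example: stack two dense layers on a 4-D feature map.
x = tf.placeholder(tf.float32, shape=(None, 8, 8, 64), name="input")      # assumed input shape
h1 = dense_layer(x, num_neurons=256, name="fc1", activation=tf.nn.relu)   # flattened to 8*8*64 internally
logits = dense_layer(h1, num_neurons=10, name="fc2", activation=tf.identity)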