def _fc_relu_layers(self, bottom, dim, name=None):
    with tf.name_scope(name) as scope:
        # Flatten everything after the batch dimension so the layer also accepts conv feature maps.
        shape = int(np.prod(bottom.get_shape()[1:]))
        weights = tf.Variable(tf.truncated_normal([shape, dim], dtype=tf.float32, stddev=0.005),
                              name='weights')
        bias = tf.Variable(tf.constant(1.0, shape=[dim], dtype=tf.float32), name='biases')
        bottom_flat = tf.reshape(bottom, [-1, shape])
        # Fully connected pre-activation: bottom_flat * weights + bias.
        fc = tf.nn.bias_add(tf.matmul(bottom_flat, weights), bias)
        self.parameters[name] = [weights, bias]
        # Register an L2 weight-decay term only once; skip it when variables are being reused.
        if not tf.get_variable_scope().reuse:
            weight_decay = tf.multiply(tf.nn.l2_loss(weights), self.wd,
                                       name='fc_relu_weight_loss')
            tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, weight_decay)
        top = tf.nn.relu(fc, name=scope)
        _activation_summary(top)
        # Print the output shape the first time the op runs, as a debugging aid.
        top = tf.Print(top, [tf.shape(top)], message='Shape of %s' % name,
                       first_n=1, summarize=4)
    return top
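
A minimal usage sketch follows, assuming TensorFlow 1.x graph mode with the usual `import tensorflow as tf` / `import numpy as np`. The enclosing class name `Net`, the `_activation_summary` stand-in, the weight-decay value, and the input shape are illustrative assumptions; only `self.parameters`, `self.wd`, and the `_activation_summary` call come from the method above.

import numpy as np
import tensorflow as tf

def _activation_summary(x):
    # Stand-in for the original (unshown) summary helper.
    tf.summary.histogram(x.op.name + '/activations', x)

class Net(object):
    # Hypothetical enclosing class; the original class is not shown.
    def __init__(self, wd=5e-4):
        self.parameters = {}
        self.wd = wd
    _fc_relu_layers = _fc_relu_layers  # reuse the method defined above

net = Net()
pool5 = tf.placeholder(tf.float32, [None, 7, 7, 512], name='pool5')
fc6 = net._fc_relu_layers(pool5, dim=4096, name='fc6')  # -> [batch, 4096]
fc7 = net._fc_relu_layers(fc6, dim=4096, name='fc7')    # input is re-flattened, so stacking works

# The collected weight-decay terms can then be summed into the training loss.
reg_loss = tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))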