def fc_relu(self, input_tensor, num_outputs, relu=False, batch_norm=False, weight_std=0.005,
            bias_init_value=0.1, name=None):
    if batch_norm and not relu:
        raise ValueError('Cannot use batch normalization without a following ReLU')
    with tf.variable_scope(name) as scope:
        # Flatten everything after the batch dimension into a single feature axis.
        num_inputs = int(np.prod(input_tensor.get_shape()[1:]))
        w, b = self.get_fc_weights(num_inputs, num_outputs,
                                   weight_std=weight_std,
                                   bias_init_value=bias_init_value)
        fc_relu = None
        input_tensor_reshaped = tf.reshape(input_tensor, [-1, num_inputs])
        # Name the raw affine output 'fc' when it is followed by BN/ReLU,
        # otherwise give it the layer name directly.
        fc = tf.add(tf.matmul(input_tensor_reshaped, w), b,
                    name='fc' if relu or batch_norm else name)
        if batch_norm:
            # The training branch creates the batch-norm variables (reuse=None);
            # the inference branch reuses them and applies the moving averages.
            fc = tf.cond(self.is_phase_train,
                         lambda: tflayers.batch_norm(fc,
                                                     decay=self.batch_norm_decay,
                                                     is_training=True,
                                                     trainable=True,
                                                     reuse=None,
                                                     scope=scope),
                         lambda: tflayers.batch_norm(fc,
                                                     decay=self.batch_norm_decay,
                                                     is_training=False,
                                                     trainable=True,
                                                     reuse=True,
                                                     scope=scope))
        if relu:
            fc_relu = tf.nn.relu(fc, name=name)
        return fc, fc_relu
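
The method assumes the usual TF 1.x setup: numpy as np, tensorflow as tf, and tensorflow.contrib.layers imported as tflayers, plus a host class that provides get_fc_weights, is_phase_train, and batch_norm_decay. Below is a minimal usage sketch under those assumptions; the SmallNet class, its weight initializers, and the placeholder shapes are illustrative and not part of the original code.

# Minimal usage sketch; SmallNet and its members are assumed for illustration.
import numpy as np
import tensorflow as tf
import tensorflow.contrib.layers as tflayers  # TF 1.x contrib layers

class SmallNet(object):
    def __init__(self, batch_norm_decay=0.99):
        self.batch_norm_decay = batch_norm_decay
        # Feed True while training so the batch-norm training branch is taken.
        self.is_phase_train = tf.placeholder(tf.bool, name='is_phase_train')

    def get_fc_weights(self, num_inputs, num_outputs, weight_std, bias_init_value):
        # Gaussian-initialized weights and constant-initialized biases.
        w = tf.get_variable('weights', [num_inputs, num_outputs],
                            initializer=tf.truncated_normal_initializer(stddev=weight_std))
        b = tf.get_variable('biases', [num_outputs],
                            initializer=tf.constant_initializer(bias_init_value))
        return w, b

SmallNet.fc_relu = fc_relu  # attach the method shown above (assumes it is defined at module level as printed)

net = SmallNet()
images = tf.placeholder(tf.float32, [None, 28, 28, 1], name='images')
_, hidden = net.fc_relu(images, 512, relu=True, batch_norm=True, name='fc1')
logits, _ = net.fc_relu(hidden, 10, name='logits')  # plain affine output, no BN/ReLU

Feeding is_phase_train=True during training and False at evaluation selects which batch-norm branch of the tf.cond is executed.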