import tensorflow as tf
from tensorflow.contrib.layers import batch_norm, fully_connected


def fc(layer, output_size, is_training,
       weight_init, weight_reg=None, activation_fn=None,
       use_batch_norm=False, scope='fc'):
    """Fully connected layer with optional batch normalization (TF 1.x)."""
    if use_batch_norm:
        # Apply batch_norm as the layer's normalizer, inserted between the
        # linear transform and the activation. is_training switches between
        # batch statistics (training) and moving averages (inference).
        batch_norm_args = {
            'normalizer_fn': batch_norm,
            'normalizer_params': {
                'is_training': is_training,
            },
        }
    else:
        batch_norm_args = {}
    # fully_connected already opens a variable scope named `scope`; wrapping
    # the call in an extra tf.variable_scope(scope) would nest the name twice
    # (e.g. 'fc/fc'), so the scope is passed directly instead.
    return fully_connected(
        layer,
        num_outputs=output_size,
        activation_fn=activation_fn,
        weights_initializer=weight_init,
        weights_regularizer=weight_reg,
        biases_initializer=tf.constant_initializer(0.0),
        scope=scope,
        **batch_norm_args
    )
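A minimal usage sketch (the shapes, initializer, and scope names here are hypothetical, not from the original): two of these layers wired into a small head, with is_training fed as a placeholder so the same graph serves training and inference.

# Hypothetical example: a 2-layer MLP head built with fc().
inputs = tf.placeholder(tf.float32, [None, 128])
is_training = tf.placeholder(tf.bool, [])

hidden = fc(inputs, 64, is_training,
            weight_init=tf.contrib.layers.xavier_initializer(),
            activation_fn=tf.nn.relu,
            use_batch_norm=True, scope='fc1')
logits = fc(hidden, 10, is_training,
            weight_init=tf.contrib.layers.xavier_initializer(),
            scope='fc2')

Note that contrib's batch_norm registers its moving-average updates in tf.GraphKeys.UPDATE_OPS by default, so the train op should depend on tf.get_collection(tf.GraphKeys.UPDATE_OPS) (e.g. via tf.control_dependencies), or the inference statistics will never be updated.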