def vgg_arg_scope(
    weight_decay=0.0005,
    use_batch_norm=False,
    batch_norm_decay=0.9997,
    batch_norm_epsilon=0.001):
  """Defines the default arg scope for VGG-style models.

  Configures `layers.fully_connected` and `layers.conv2d` with ReLU
  activations, variance-scaling weight initialization, and shared L2
  weight regularization; conv layers optionally get batch normalization.

  Args:
    weight_decay: float, strength of the L2 regularizer applied to the
      weights of both conv and fully-connected layers.
    use_batch_norm: bool, if True `layers.batch_norm` is applied as the
      normalizer of every conv2d layer (fully-connected layers are never
      batch-normalized here).
    batch_norm_decay: float, decay for the batch-norm moving averages.
      Default matches the previous hard-coded value (0.9997).
    batch_norm_epsilon: float, small constant added to the variance to
      avoid division by zero. Default matches the previous hard-coded
      value (0.001).

  Returns:
    An `arg_scope` to be used with `with arg_scope(vgg_arg_scope(...)):`.
  """
  batch_norm_params = {
      # Decay for the moving averages.
      'decay': batch_norm_decay,
      # epsilon to prevent 0s in variance.
      'epsilon': batch_norm_epsilon,
  }
  normalizer_fn = layers.batch_norm if use_batch_norm else None
  normalizer_params = batch_norm_params if use_batch_norm else None
  # One shared regularizer instance for both layer types.
  l2_regularizer = layers.l2_regularizer(weight_decay)
  with arg_scope(
      [layers.fully_connected],
      biases_initializer=tf.constant_initializer(0.1),
      weights_initializer=layers.variance_scaling_initializer(factor=1.0),
      weights_regularizer=l2_regularizer,
      activation_fn=tf.nn.relu):
    with arg_scope(
        [layers.conv2d],
        normalizer_fn=normalizer_fn,
        normalizer_params=normalizer_params,
        weights_initializer=layers.variance_scaling_initializer(factor=1.0),
        weights_regularizer=l2_regularizer,
        activation_fn=tf.nn.relu) as arg_sc:
      return arg_sc
# NOTE(review): removed trailing web-page navigation residue ("评论列表" /
# "文章目录" — "comment list" / "table of contents") left over from a copy-paste;
# it was not code and would have raised NameError at import time.