def _get_normalizer(is_training, use_bn, use_ln):
'''
Helper to get normalizer function and params
'''
batch_norm_params = {'is_training': is_training,
'decay': 0.999, 'center': True,
'scale': True, 'updates_collections': None}
layer_norm_params = {'center': True, 'scale': True}
if use_ln:
print 'using layer norm'
normalizer_fn = slim.layer_norm
normalizer_params = layer_norm_params
elif use_bn:
print 'using batch norm'
normalizer_fn = slim.batch_norm
normalizer_params = batch_norm_params
else:
print 'not using any layer normalization scheme'
normalizer_fn = None
normalizer_params = None
return [normalizer_fn, normalizer_params]