import tensorflow as tf

def spatial_batch_norm(input_layer, name='spatial_batch_norm'):
    """
    Batch-normalizes the layer as in http://arxiv.org/abs/1502.03167.
    This is important because it lets the different scales talk to each
    other on a common footing when they get joined.
    """
    # Per-channel mean and variance over the batch, height, and width
    # dimensions (axes 0, 1, 2 of an NHWC tensor).
    mean, variance = tf.nn.moments(input_layer, [0, 1, 2])
    # Small constant for numerical stability; the paper adds an epsilon
    # on the order of 1e-3 to the variance.
    variance_epsilon = 0.001
    inv = tf.rsqrt(variance + variance_epsilon)
    num_channels = input_layer.get_shape().as_list()[3]
    # Learnable per-channel scale (gamma) and offset (beta). Initializing
    # gamma to 1 and beta to 0 makes the layer start out as a plain
    # normalization, as the paper recommends.
    scale = tf.Variable(tf.ones([num_channels]), name='scale')
    offset = tf.Variable(tf.zeros([num_channels]), name='offset')
    # y = gamma * (x - mean) / sqrt(variance + epsilon) + beta
    # (the paper's formula adds beta; the original code subtracted it).
    return tf.add(tf.mul(tf.mul(scale, inv), tf.sub(input_layer, mean)),
                  offset, name=name)
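
A minimal usage sketch, assuming TensorFlow's pre-1.0 API (matching the tf.sub/tf.mul calls above); conv_out and its shape are hypothetical stand-ins for a convolution output in NHWC layout:

import tensorflow as tf

# Hypothetical NHWC convolution output: batch x 32 x 32 x 64 channels.
conv_out = tf.placeholder(tf.float32, [None, 32, 32, 64])
normalized = spatial_batch_norm(conv_out, name='bn_scale_join')

with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())  # pre-1.0 variable initializer
    # Evaluate `normalized` by feeding real batches:
    # sess.run(normalized, feed_dict={conv_out: batch})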
Source file: network_helpers.py