import tensorflow as tf


def layer_norm(inputs, epsilon=1e-6, dtype=None, scope=None):
    """Layer normalization over the channel (last) axis.

    Args:
        inputs: A Tensor of shape [..., channel_size].
        epsilon: A small float added to the variance for numerical stability.
        dtype: An optional instance of tf.DType for the created variables.
        scope: An optional string naming the variable scope.

    Returns:
        A Tensor with the same shape as inputs.
    """
    with tf.variable_scope(scope, default_name="layer_norm", values=[inputs],
                           dtype=dtype):
        channel_size = inputs.get_shape().as_list()[-1]
        # Learnable per-channel gain and bias, initialized to the identity
        # transform (scale = 1, offset = 0).
        scale = tf.get_variable("scale", shape=[channel_size],
                                initializer=tf.ones_initializer())
        offset = tf.get_variable("offset", shape=[channel_size],
                                 initializer=tf.zeros_initializer())
        # Unlike batch normalization, the statistics are computed over the
        # last axis of each example, so they do not depend on the batch.
        mean = tf.reduce_mean(inputs, axis=-1, keepdims=True)
        variance = tf.reduce_mean(tf.square(inputs - mean), axis=-1,
                                  keepdims=True)
        norm_inputs = (inputs - mean) * tf.rsqrt(variance + epsilon)
        return norm_inputs * scale + offset
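
For reference, here is a minimal usage sketch (not from the original post) that feeds a random batch through layer_norm. It assumes TensorFlow 1.x, since the function relies on tf.variable_scope and tf.get_variable; the placeholder shape [batch, length, 512] is purely illustrative.

# Usage sketch (illustrative assumption, not part of the original function):
# normalize a batch of 512-dimensional token representations.
import numpy as np

x = tf.placeholder(tf.float32, shape=[None, 10, 512])  # [batch, length, channels]
y = layer_norm(x)  # same shape as x

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    out = sess.run(y, feed_dict={x: np.random.randn(2, 10, 512).astype("float32")})
    print(out.shape)  # (2, 10, 512); each position now has ~zero mean and
                      # ~unit variance along the channel axis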