def layer_norm(x, filters=None, epsilon=1e-6, name=None, reuse=None):
  """Layer normalize the tensor x, averaging over the last dimension.

  Creates (or reuses) per-feature `scale` and `bias` variables of size
  `filters` and applies them after normalizing over the last dimension.

  Args:
    x: a Tensor; normalization is computed over its last dimension.
    filters: int, size of the last dimension of `x`. Defaults to the
      statically-known last dimension of `x`.
    epsilon: small float added for numerical stability (passed to the
      compute helper).
    name: optional name for the variable scope (default "layer_norm").
    reuse: passed through to `tf.variable_scope` to control variable reuse.

  Returns:
    A Tensor with the same shape as `x`.
  """
  if filters is None:
    filters = x.get_shape()[-1]
  with tf.variable_scope(
      name, default_name="layer_norm", values=[x], reuse=reuse):
    scale = tf.get_variable(
        "layer_norm_scale", [filters], initializer=tf.ones_initializer())
    bias = tf.get_variable(
        "layer_norm_bias", [filters], initializer=tf.zeros_initializer())
    # NOTE(review): `allow_defun`, `layer_norm_compute`, and
    # `layer_norm_compute_python` are module-level names defined elsewhere
    # in this file. The explicit set_shape on the defun path presumably
    # restores static shape info that does not propagate through a
    # Defun-wrapped op -- TODO confirm.
    if allow_defun:
      result = layer_norm_compute(x, tf.constant(epsilon), scale, bias)
      result.set_shape(x.get_shape())
    else:
      result = layer_norm_compute_python(x, epsilon, scale, bias)
    return result
# NOTE(review): removed stray web-page navigation text ("评论列表" /
# "文章目录") — scrape artifact appended after the function, not code.