Python source-code examples using variance_scaling_initializer()
import tensorflow as tf


def get_initializer(params):
  """Returns a variable initializer selected by params.initializer."""
  if params.initializer == "uniform":
    max_val = params.initializer_gain
    return tf.random_uniform_initializer(-max_val, max_val)
  elif params.initializer == "normal":
    return tf.random_normal_initializer(0.0, params.initializer_gain)
  elif params.initializer == "normal_unit_scaling":
    return tf.variance_scaling_initializer(params.initializer_gain,
                                           mode="fan_avg",
                                           distribution="normal")
  elif params.initializer == "uniform_unit_scaling":
    return tf.variance_scaling_initializer(params.initializer_gain,
                                           mode="fan_avg",
                                           distribution="uniform")
  else:
    raise ValueError("Unrecognized initializer: %s" % params.initializer)
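A minimal way to exercise get_initializer, assuming params is any object that exposes initializer and initializer_gain attributes (the SimpleNamespace below is just a stand-in for the real configuration object). With a gain of 1.0, the "uniform_unit_scaling" branch is equivalent to Glorot/Xavier-style uniform initialization:

import types

# Stand-in for the real params/hparams object (illustrative only).
params = types.SimpleNamespace(initializer="uniform_unit_scaling",
                               initializer_gain=1.0)
init = get_initializer(params)
# Variables created with this initializer use fan_avg uniform scaling.
w = tf.get_variable("proj_w", shape=[256, 256], initializer=init)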
def conv2d_fixed_padding(inputs, filters, kernel_size, strides):
  """Strided 2-D convolution with explicit padding.

  The padding is consistent and is based only on `kernel_size`, not on the
  dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone).

  Args:
    inputs: A Tensor of size [batch, channels, height_in, width_in].
    filters: The number of filters in the convolution.
    kernel_size: The size of the kernel to be used in the convolution.
    strides: The strides of the convolution.

  Returns:
    A Tensor of shape [batch, filters, height_out, width_out].
  """
  if strides > 1:
    inputs = fixed_padding(inputs, kernel_size)
  return tf.layers.conv2d(
      inputs=inputs, filters=filters, kernel_size=kernel_size, strides=strides,
      padding=('SAME' if strides == 1 else 'VALID'), use_bias=False,
      kernel_initializer=tf.variance_scaling_initializer(),
      data_format='channels_first')
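These conv2d_fixed_padding variants delegate to a fixed_padding helper that is not included in the snippets. A minimal sketch consistent with the docstring above (pad a total of kernel_size - 1 pixels per spatial dimension, split between the two sides, independent of input size); the channels_last branch is an assumption to also cover callers that pass data_format explicitly:

def fixed_padding(inputs, kernel_size, data_format='channels_first'):
  """Pads height and width by kernel_size - 1 in total, regardless of input size."""
  pad_total = kernel_size - 1
  pad_beg = pad_total // 2
  pad_end = pad_total - pad_beg
  if data_format == 'channels_first':
    # NCHW: spatial dimensions are the last two axes.
    return tf.pad(inputs, [[0, 0], [0, 0], [pad_beg, pad_end], [pad_beg, pad_end]])
  # NHWC: spatial dimensions are the middle two axes.
  return tf.pad(inputs, [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]])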
def conv2d_fixed_padding(self, inputs, filters, kernel_size, strides, name=None, relu=True):
  """Same as above, as a class method, followed by batch norm and an optional ReLU."""
  if strides > 1:
    inputs = self.fixed_padding(inputs, kernel_size)
  inputs = tf.layers.conv2d(
      inputs=inputs, filters=filters, kernel_size=kernel_size, strides=strides,
      padding=('SAME' if strides == 1 else 'VALID'), use_bias=False,
      kernel_initializer=tf.variance_scaling_initializer(), name=name)
  if relu:
    return self.batch_norm_relu(inputs, name)
  else:
    return self.batch_norm(inputs, name)
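This class-based variant defers to self.batch_norm_relu and self.batch_norm, which are not part of the snippet. A common TF 1.x implementation of that pattern looks roughly like the standalone sketch below; the signature, axis, and momentum/epsilon values are assumptions, not the original class code:

def batch_norm_relu(inputs, training, relu=True, name=None):
  # Batch normalization followed by an optional ReLU; assumes channels-last inputs.
  inputs = tf.layers.batch_normalization(
      inputs, axis=-1, momentum=0.997, epsilon=1e-5,
      training=training, fused=True, name=name)
  if relu:
    inputs = tf.nn.relu(inputs)
  return inputs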
def conv2d_fixed_padding(**kwargs):
  """conv2d with fixed_padding, based only on kernel_size."""
  strides = kwargs["strides"]
  if strides > 1:
    kwargs["inputs"] = fixed_padding(kwargs["inputs"], kwargs["kernel_size"],
                                     kwargs["data_format"])
  defaults = {
      "padding": ("SAME" if strides == 1 else "VALID"),
      "use_bias": False,
      "kernel_initializer": tf.variance_scaling_initializer(),
  }
  defaults.update(kwargs)
  return tf.layers.conv2d(**defaults)
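The kwargs-based variant expects inputs, kernel_size, and strides (plus data_format when strides > 1) and forwards everything else to tf.layers.conv2d; caller-supplied keys override the defaults. An illustrative call (shapes and values are made up):

x = tf.placeholder(tf.float32, [None, 224, 224, 3])
y = conv2d_fixed_padding(inputs=x, filters=64, kernel_size=7, strides=2,
                         data_format='channels_last')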
def get_variable_initializer(hparams):
  """Get variable initializer from hparams."""
  if hparams.initializer == "orthogonal":
    return tf.orthogonal_initializer(gain=hparams.initializer_gain)
  elif hparams.initializer == "uniform":
    max_val = 0.1 * hparams.initializer_gain
    return tf.random_uniform_initializer(-max_val, max_val)
  elif hparams.initializer == "normal_unit_scaling":
    return tf.variance_scaling_initializer(
        hparams.initializer_gain, mode="fan_avg", distribution="normal")
  elif hparams.initializer == "uniform_unit_scaling":
    return tf.variance_scaling_initializer(
        hparams.initializer_gain, mode="fan_avg", distribution="uniform")
  else:
    raise ValueError("Unrecognized initializer: %s" % hparams.initializer)
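In practice the returned initializer is typically installed as the default for the model's variable scope, so every tf.get_variable call inside inherits it. A minimal sketch, assuming a TF 1.x HParams object with the two relevant fields (the scope and variable names are illustrative):

hparams = tf.contrib.training.HParams(initializer="uniform_unit_scaling",
                                      initializer_gain=1.0)
with tf.variable_scope("body", initializer=get_variable_initializer(hparams)):
  # Picks up the scope-level initializer automatically.
  w = tf.get_variable("w", shape=[512, 512])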