def __init__(self, num_units_out, activation=tf.nn.relu, initializer=None,
input_keep_prob=None, output_keep_prob=None,
normalization_fn=None, weight_norm=False, name=None):
"""Initializes the layer.
Args:
num_units_out: The number of output units in the layer.
activation: The activation function. Default is ReLU. Use `None` to get a
linear layer.
    initializer: The initializer for the weights. Defaults to uniform unit
      scaling with a factor derived in <http://arxiv.org/pdf/1412.6558v3.pdf>
      if `activation` is ReLU, ReLU6, tanh, or None (linear). Otherwise
      defaults to truncated normal initialization with a standard deviation
      of 0.01.
    input_keep_prob: Optional scalar float32 tensor holding the keep
      probability for dropout applied to the input. Feed 1.0 at serving time
      to disable dropout.
    output_keep_prob: Optional scalar float32 tensor holding the keep
      probability for dropout applied to the output. Feed 1.0 at serving time
      to disable dropout.
    normalization_fn: Optional normalization function that is applied to the
      pre-activation values, before the nonlinearity.
    weight_norm: A bool controlling whether weight normalization is used. See
      https://arxiv.org/abs/1602.07868 for details.
    name: An optional string name. Defaults to `'FC_%d' % num_units_out`.
      Used to name the variable scope where the layer's variables live.
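
    For example, a minimal usage sketch (illustrative, not from the original
    docs; assumes the `td` module from tensorflow_fold and a keep-probability
    placeholder for dropout):

      keep_prob = tf.placeholder_with_default(1.0, [])
      fc = FC(64, input_keep_prob=keep_prob)
      block = td.Vector(128) >> td.Function(fc)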
"""
self.set_constructor_args('td.FC', *get_local_arguments(FC.__init__, True))
if not initializer:
# TODO(SamEisenstat): This constant is calibrated for ReLU, something else
# might be better for ReLU6.
    if activation in [tf.nn.relu, tf.nn.relu6]:
      # 1.43 is roughly sqrt(2), the gain that keeps activation variance
      # stable under ReLU (cf. the paper cited in the docstring).
      initializer = tf.uniform_unit_scaling_initializer(1.43)
    elif activation == tf.tanh:
      initializer = tf.uniform_unit_scaling_initializer(1.15)
    elif not activation:
      # No activation means a linear layer; plain unit scaling suffices.
      initializer = tf.uniform_unit_scaling_initializer(1.0)
    else:
      # Unrecognized activation: fall back to a small truncated normal.
      initializer = tf.truncated_normal_initializer(stddev=0.01)
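    # Note: per the TF 1.x docs, tf.uniform_unit_scaling_initializer(factor)
    # samples uniformly from [-factor * sqrt(3 / dim), factor * sqrt(3 / dim)]
    # where dim is the input dimension, so weights start with variance of
    # about factor**2 / dim and the factors above set the per-activation gain.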
self._activation = activation
self._initializer = initializer
self._input_keep_prob = input_keep_prob
self._output_keep_prob = output_keep_prob
self._normalization_fn = normalization_fn
self._weight_norm = weight_norm
  if name is None:
    name = 'FC_%d' % num_units_out
super(FC, self).__init__(
output_type=tdt.TensorType([num_units_out]), name_or_scope=name)
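
  # Serving-time sketch (illustrative; assumes dropout keep probabilities
  # were passed in as placeholders, e.g. `keep_prob` in the docstring
  # example): feed 1.0 to disable dropout when running the graph:
  #   sess.run(fetches, feed_dict={keep_prob: 1.0})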