def dropout_selu(x, is_training, drop_p=0.2, alpha=-1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0,
                 noise_shape=None, seed=None, name='dropout_selu', outputs_collections=None, **unused):
    """
    Dropout layer for self-normalizing networks (alpha dropout).

    Args:
        x: a `Tensor`.
        is_training: a bool, training or validation
        drop_p: probability of dropping a unit
        alpha: float, the value dropped units are set to (the negative
            product of the two SELU parameters, scale * alpha)
        fixedPointMean: float, the mean used to calculate the selu parameters
        fixedPointVar: float, the variance used to calculate the selu parameters
        noise_shape: shape of the randomly generated keep/drop flags
        seed: an integer seed for the random op
        name: an optional scope/name of the layer
        outputs_collections: the collections to which the outputs are added.

    Returns:
        A `Tensor` representing the results of the dropout operation.
    """
    _check_unused(unused, name)

    def dropout_selu_impl(x, drop_p, alpha, noise_shape, seed, name):
        keep_prob = 1.0 - drop_p
        if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:
            raise ValueError("keep_prob must be a scalar tensor or a float in the "
                             "range (0, 1], got %g" % keep_prob)
        keep_prob = tf.convert_to_tensor(
            keep_prob, dtype=x.dtype, name="keep_prob")
        keep_prob.get_shape().assert_has_rank(0)
        alpha = tf.convert_to_tensor(alpha, dtype=x.dtype, name="alpha")
        alpha.get_shape().assert_has_rank(0)
        if tf.contrib.util.constant_value(keep_prob) == 1:
            return x

        noise_shape = noise_shape if noise_shape is not None else x.get_shape().as_list()
        # floor(keep_prob + U[0, 1)) is 1 with probability keep_prob, else 0.
        random_tensor = keep_prob
        random_tensor += tf.random_uniform(noise_shape,
                                           seed=seed, dtype=x.dtype)
        binary_tensor = tf.floor(random_tensor)
        # Dropped units are set to alpha (the SELU saturation value) instead of 0.
        ret = x * binary_tensor + alpha * (1 - binary_tensor)
        # Affine correction a * ret + b restores the fixed-point mean and variance.
        a = tf.sqrt(fixedPointVar / (keep_prob * ((1 - keep_prob) *
                                                  tf.pow(alpha - fixedPointMean, 2) + fixedPointVar)))
        b = fixedPointMean - a * \
            (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)
        ret = a * ret + b
        ret.set_shape(x.get_shape())
        return ret
    with tf.name_scope(name, [x]):
        if is_training:
            output = dropout_selu_impl(
                x, drop_p, alpha, noise_shape, seed, name)
        else:
            output = x
        return _collect_named_outputs(outputs_collections, name, output)
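
# A minimal usage sketch (my assumption, not from the original article): wiring
# the layer into a small TF1 graph so alpha dropout is active only while
# training. It relies on the module-level imports (`tensorflow as tf`,
# `numbers`) and the `_check_unused` / `_collect_named_outputs` helpers
# referenced above.
if __name__ == '__main__':
    inputs = tf.placeholder(tf.float32, [None, 128], name='inputs')
    hidden = tf.layers.dense(inputs, 64, activation=tf.nn.selu)
    # Training graph: units are dropped to the SELU saturation value, then the
    # affine correction keeps activations at the fixed-point mean/variance.
    train_out = dropout_selu(hidden, is_training=True, drop_p=0.1, name='selu_dropout_train')
    # Evaluation graph: the layer is an identity mapping.
    eval_out = dropout_selu(hidden, is_training=False, name='selu_dropout_eval')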