def leaky_relu(x, alpha=0.01, name='leaky_relu', outputs_collections=None, **unused):
"""
Computes leaky relu
Args:
x: a `Tensor` with type `float`, `double`, `int32`, `int64`, `uint8`, int16`, or `int8`.
aplha: the conatant fro scalling the activation
name: a optional scope/name of the layer
outputs_collections: The collections to which the outputs are added.
Returns:
A `Tensor` representing the results of the activation operation.
"""
    _check_unused(unused, name)
    with tf.name_scope(name):
        # leaky_relu(x) = relu(x) + alpha * (x - |x|) / 2
        #   x > 0: relu(x) = x and (x - |x|) = 0, so the output is x
        #   x < 0: relu(x) = 0 and (x - |x|) = 2x, so the output is alpha * x
        try:
            # TensorFlow >= 1.0 renamed tf.mul to tf.multiply.
            output = tf.nn.relu(x) + tf.multiply(alpha, (x - tf.abs(x))) * 0.5
        except AttributeError:
            # Fall back to the pre-1.0 name on older TensorFlow versions.
            output = tf.nn.relu(x) + tf.mul(alpha, (x - tf.abs(x))) * 0.5
    return _collect_named_outputs(outputs_collections, name, output)
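
# A minimal usage sketch, not part of the original source. It assumes the
# TensorFlow 1.x graph-mode API (tf.Session) and that `tf`, `_check_unused`,
# and `_collect_named_outputs` are available in this module as shown above.
# The input values and scope name below are illustrative only.
if __name__ == '__main__':
    x = tf.constant([[-2.0, -0.5, 0.0, 1.5]])
    # Negative inputs are scaled by alpha; positive inputs pass through unchanged.
    y = leaky_relu(x, alpha=0.1, name='leaky_relu_demo')
    with tf.Session() as sess:
        print(sess.run(y))  # expected: [[-0.2  -0.05  0.    1.5 ]]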