# From Keras' tensorflow_backend.py.
import tensorflow as tf


def _to_tensor(x, dtype):
    """Convert `x` to a tensor of type `dtype` (Keras backend helper)."""
    return tf.convert_to_tensor(x, dtype=dtype)


def relu(x, alpha=0., max_value=None):
    """Rectified linear unit.

    With default values, it returns element-wise `max(x, 0)`.

    # Arguments
        x: A tensor or variable.
        alpha: A scalar, slope of the negative section (default=`0.`).
        max_value: Saturation threshold.

    # Returns
        A tensor.
    """
    if alpha != 0.:
        # Save the negative part so it can be re-added with slope `alpha`.
        negative_part = tf.nn.relu(-x)
    x = tf.nn.relu(x)
    if max_value is not None:
        # Saturate: clip the activation to the range [0, max_value].
        max_value = _to_tensor(max_value, x.dtype.base_dtype)
        zero = _to_tensor(0., x.dtype.base_dtype)
        x = tf.clip_by_value(x, zero, max_value)
    if alpha != 0.:
        # Leaky ReLU: subtract alpha * max(-x, 0) to restore the negative slope.
        alpha = _to_tensor(alpha, x.dtype.base_dtype)
        x -= alpha * negative_part
    return x
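
A minimal usage sketch covering the three behaviours (plain ReLU, saturated ReLU, and leaky ReLU). It assumes TensorFlow 2.x with eager execution so results print directly; the input values are chosen purely for illustration:

x = tf.constant([-2.0, -0.5, 0.0, 3.0, 8.0])

print(relu(x).numpy())                # [ 0.    0.    0.   3.   8. ]  element-wise max(x, 0)
print(relu(x, max_value=6.).numpy())  # [ 0.    0.    0.   3.   6. ]  ReLU6-style saturation
print(relu(x, alpha=0.1).numpy())     # [-0.2  -0.05  0.   3.   8. ]  leaky ReLU, slope 0.1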