def relu(x, alpha=0., max_value=None):
    '''Rectified linear unit.

    # Arguments
        alpha: slope of negative section.
        max_value: saturation threshold.
    '''
    if alpha != 0.:
        # Keep the magnitudes of the negative inputs so the leaky
        # slope can be re-applied after the positive side is clipped.
        negative_part = tf.nn.relu(-x)
    x = tf.nn.relu(x)
    if max_value is not None:
        # Saturate the positive side at max_value.
        max_value = _to_tensor(max_value, x.dtype.base_dtype)
        zero = _to_tensor(0., x.dtype.base_dtype)
        x = tf.clip_by_value(x, zero, max_value)
    if alpha != 0.:
        # Subtract alpha * |negative inputs| to restore the leaky slope.
        alpha = _to_tensor(alpha, x.dtype.base_dtype)
        x -= alpha * negative_part
    return x
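
# A minimal standalone sketch (assumption: eager-mode TensorFlow, with the
# backend helper _to_tensor replaced by plain Python floats) of what
# relu(x, alpha=0.1, max_value=6.) computes: a leaky ReLU whose positive
# side saturates at max_value.
import tensorflow as tf

x = tf.constant([-2.0, -0.5, 0.0, 1.0, 8.0])

negative_part = tf.nn.relu(-x)      # magnitudes of the negative inputs
y = tf.nn.relu(x)                   # zero out the negative side
y = tf.clip_by_value(y, 0.0, 6.0)   # saturate the positive side at max_value
y = y - 0.1 * negative_part         # subtract alpha * negative_part for the leaky slope

print(y.numpy())                    # [-0.2  -0.05  0.    1.    6.  ]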