def lrelu(x, leak=0.2, name=None):
    """Leaky rectified linear unit.

    Computes ``max(x, leak * x)`` via the algebraic identity
    ``0.5*(1+leak)*x + 0.5*(1-leak)*|x|``, which avoids a branch.
    (The identity holds for ``leak <= 1``.)

    Parameters
    ----------
    x : Tensor
        The tensor to apply the nonlinearity to.
    leak : float, optional
        Leakage parameter (slope for negative inputs). Default 0.2.
    name : str, optional
        Name scope to use.

    Returns
    -------
    x : Tensor
        Output of the nonlinearity, same shape as the input.
    """
    # tf.op_scope was deprecated and removed; tf.name_scope(name,
    # default_name, values) is the documented replacement.
    with tf.name_scope(name, 'lrelu', [x]):
        f1 = 0.5 * (1 + leak)
        f2 = 0.5 * (1 - leak)
        # abs() dispatches to the tensor's __abs__ (element-wise).
        x = tf.add(f1 * x, f2 * abs(x))
        return x