def elu(x, alpha=1.):
    """Exponential linear unit.

    Computes `x` for `x > 0` and `alpha * (exp(x) - 1)` for `x <= 0`.

    # Arguments
        x: Tensor to compute the activation function for.
        alpha: scalar, slope of the negative section.

    # Returns
        A tensor with the ELU activation applied elementwise.
    """
    # tf.nn.elu already computes exp(x) - 1 on the negative branch.
    res = tf.nn.elu(x)
    if alpha == 1:
        return res
    else:
        # Scale only the negative branch by alpha; the positive branch
        # stays `x`. `tf.select` was removed in TF 1.0 — the three-arg
        # `tf.where(cond, x, y)` is its direct replacement.
        return tf.where(x > 0, res, alpha * res)
# NOTE(review): the following two lines appear to be blog-page scrape
# residue (navigation text), not code — commented out so the module
# imports cleanly. Original text preserved:
# 评论列表 ("comment list")
# 文章目录 ("article table of contents")