def prelu(features, alpha, scope=None):
    """Apply a Parametric ReLU activation (https://arxiv.org/abs/1502.01852).

    Computes ``max(0, x) + alpha * min(0, x)`` elementwise, where the
    negative part is expressed as ``alpha * (x - |x|) / 2`` (adapted from
    the Keras implementation).

    Args:
        features: Input tensor.
        alpha: Slope tensor/scalar for negative inputs (broadcastable
            against ``features``).
        scope: Optional variable-scope name; defaults to ``'PReLU'``.

    Returns:
        A tensor of the same shape as ``features``.
    """
    with tf.variable_scope(scope, 'PReLU'):
        positive_part = tf.nn.relu(features)
        # (x - |x|) is 0 for x >= 0 and 2*x for x < 0, so halving it
        # isolates the negative portion of the input.
        negative_part = 0.5 * alpha * (features - tf.abs(features))
        return positive_part + negative_part
# NOTE(review): removed stray scraped-page artifacts ("评论列表" / "文章目录",
# i.e. blog "comment list" / "table of contents" text) — they were not code
# and would have been a Python syntax error.