def crelu(x, axis=-1):
    """Compute the Concatenated ReLU (CReLU) of `x`.

    Concatenates a ReLU which selects only the positive part of the
    activation with a ReLU which selects only the negative part of the
    activation. Note that as a result this non-linearity doubles the
    depth of the activations along `axis`.

    Source: "Understanding and Improving Convolutional Neural Networks
    via Concatenated Rectified Linear Units" (https://arxiv.org/abs/1603.05201).

    Arguments:
        x: A `Tensor` with type `float`, `double`, `int32`, `int64`,
            `uint8`, `int16`, or `int8`.
        axis: The axis along which the output values are concatenated.
            Defaults to -1 (the last axis), matching `tf.nn.crelu`.

    Returns:
        A `Tensor` with the same type as `x`, twice the size of `x`
        along `axis`.
    """
    # Delegate to TensorFlow's native implementation; `axis` is forwarded
    # so callers can control where the depth doubling happens.
    return tf.nn.crelu(x, axis=axis)
# NOTE(review): the two lines below were stray blog-scrape navigation text
# ("评论列表" = "comment list", "文章目录" = "article table of contents");
# left as bare identifiers they would raise NameError at import time, so
# they are preserved here as a comment only.