import theano.tensor as T

def applyActivationFunction_PReLU(inputData, PreluActivations):
    """Parametric Rectified Linear Unit.

    It follows:
        `f(x) = alpha * x for x < 0`,
        `f(x) = x for x >= 0`,
    where `alpha` is a learned array with the same shape as x.

    - inputData is a tensor of shape (batchSize, FeatMaps, xDim, yDim, zDim).
    - PreluActivations is a vector holding one learned alpha per feature map.
    """
    # Broadcast the per-feature-map alphas over batch and spatial dimensions.
    preluActivationsAsRow = PreluActivations.dimshuffle('x', 0, 'x', 'x', 'x')
    # Positive part: identical to a plain ReLU.
    pos = T.maximum(0, inputData)
    # Negative part: (x - |x|) / 2 equals x where x < 0 and 0 elsewhere,
    # scaled by the learned alpha.
    neg = preluActivationsAsRow * (inputData - abs(inputData)) * 0.5
    output = pos + neg
    return output
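
# Minimal usage sketch (not part of the original code): the names
# `preluAlphas` and `numFeatMaps` and the 0.25 initial alpha are
# illustrative assumptions; only the call to applyActivationFunction_PReLU
# is taken from above.
import numpy as np
import theano

numFeatMaps = 4
# One learned alpha per feature map, stored as a Theano shared variable.
preluAlphas = theano.shared(
    np.full((numFeatMaps,), 0.25, dtype=theano.config.floatX),
    name='preluAlphas')

x = T.tensor5('x')  # symbolic 5-D input: (batch, featMaps, xDim, yDim, zDim)
y = applyActivationFunction_PReLU(x, preluAlphas)
preluFn = theano.function([x], y)

sample = np.random.randn(2, numFeatMaps, 3, 3, 3).astype(theano.config.floatX)
print(preluFn(sample).shape)  # (2, 4, 3, 3, 3)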
# --- version 2 ---