def applyActivationFunction_PReLU_v3(inputData, PreluActivations):
    """ PReLU activation: f(x) = x for x > 0, alpha * x otherwise.

    inputData is a 5D tensor with shape:
    (batchSize,
     Number of feature maps,
     convolvedImageShape[0],
     convolvedImageShape[1],
     convolvedImageShape[2])

    PreluActivations holds one learnable alpha per feature map. """
    # Broadcast the per-feature-map alphas over the batch and spatial dimensions.
    preluActivationsAsRow = PreluActivations.dimshuffle('x', 0, 'x', 'x', 'x')
    # PReLU rewritten without branching:
    # 0.5*(1+a)*x + 0.5*(1-a)*|x|  ==  x if x > 0 else a*x
    pos = 0.5 * (1 + preluActivationsAsRow)
    neg = 0.5 * (1 - preluActivationsAsRow)
    output = pos * inputData + neg * abs(inputData)
    return output
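
# A minimal usage sketch (an assumption for illustration, not part of the
# original code): one learnable alpha per feature map, initialised to 0.25
# as in the PReLU paper, applied to a symbolic 5D Theano input.
import numpy as np
import theano
import theano.tensor as T

numberOfFeatMaps = 8  # hypothetical number of feature maps
preluAlphas = theano.shared(
    np.full((numberOfFeatMaps,), 0.25, dtype=theano.config.floatX))
inputTensor5D = T.TensorType(theano.config.floatX, (False,) * 5)('inputTensor5D')
outputTensor5D = applyActivationFunction_PReLU_v3(inputTensor5D, preluAlphas)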
# Benchmark on ReLU/PReLU activations:
# http://gforge.se/2015/06/benchmarking-relu-and-prelu/
# TODO: Implement some other activation functions, e.g.:
#   Randomized ReLU
#   S-shaped ReLU
#   Thresholded ReLU (a sketch follows below)
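
# Hedged sketch of one TODO item above (ThresholdedReLU), following the same
# 5D tensor layout as the PReLU function; illustrative only, not the original
# author's implementation. f(x) = x if x > theta, else 0.
import theano.tensor as T

def applyActivationFunction_ThresholdedReLU(inputData, thresholdTheta=1.0):
    """ inputData is a 5D tensor (batchSize, FeatMaps, xDim, yDim, zDim);
    thresholdTheta is a scalar threshold (hypothetical default of 1.0). """
    output = T.switch(T.gt(inputData, thresholdTheta), inputData, 0.)
    return output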