def apply_activation(self, lin_output, activation):
    ## assumes the usual module-level import of layers.py: import theano.tensor as T
    if activation == 'SIGMOID':
        final_output = T.nnet.sigmoid(lin_output)
    elif activation == 'TANH':
        final_output = T.tanh(lin_output)
    elif activation == 'LINEAR':
        final_output = lin_output
    elif activation == 'ReLU':  ## rectified linear unit
        final_output = T.maximum(0.0, lin_output)
    elif activation == 'ReSU':  ## rectified smooth unit (softplus); use symbolic T.log/T.exp, not numpy, on a Theano tensor
        final_output = T.log(1.0 + T.exp(lin_output))
    else:
        self.logger.critical('the activation function %s is not supported yet; please modify layers.py to support it' % (activation))
        raise ValueError('unsupported activation function: %s' % activation)
    return final_output
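
A minimal standalone sketch (illustrative, not part of layers.py) of what the 'ReLU' and 'ReSU' branches compute once compiled; the variable names and test values here are assumptions:

import numpy
import theano
import theano.tensor as T

x = T.matrix('x')                  # stands in for a layer's symbolic linear output
relu_out = T.maximum(0.0, x)       # the 'ReLU' branch above
resu_out = T.log(1.0 + T.exp(x))   # the 'ReSU' (softplus) branch above

f = theano.function([x], [relu_out, resu_out])
relu_val, resu_val = f(numpy.array([[-1.0, 0.5]], dtype=theano.config.floatX))
print(relu_val)  # [[0.  0.5]]
print(resu_val)  # approx. [[0.3133 0.9741]]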