from keras.layers import Activation, ELU, LeakyReLU, PReLU, ThresholdedReLU


def get_activation_layer(activation):
    """Return the advanced-activation layer matching the given name.

    Falls back to a plain Activation layer for built-in activation
    strings such as 'relu' or 'tanh'.
    """
    if activation == 'LeakyReLU':
        return LeakyReLU()
    if activation == 'PReLU':
        return PReLU()
    if activation == 'ELU':
        return ELU()
    if activation == 'ThresholdedReLU':
        return ThresholdedReLU()
    return Activation(activation)
# TODO: same for optimizers, including clipnorm
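
# A minimal sketch of the optimizer helper the TODO above asks for, assuming
# Keras 2-style optimizer classes. The function name get_optimizer and the
# particular optimizers covered are illustrative choices, not from the
# original code.
from keras.optimizers import SGD, RMSprop, Adam


def get_optimizer(optimizer, clipnorm=None):
    # Keras 2 optimizers accept a clipnorm kwarg for gradient norm clipping;
    # only pass it through when the caller actually set it.
    kwargs = {} if clipnorm is None else {'clipnorm': clipnorm}
    if optimizer == 'SGD':
        return SGD(**kwargs)
    if optimizer == 'RMSprop':
        return RMSprop(**kwargs)
    if optimizer == 'Adam':
        return Adam(**kwargs)
    # Fall back to the name itself: model.compile() resolves optimizer
    # strings, but clipnorm cannot be attached that way.
    return optimizer

# Hypothetical usage:
#   model.compile(optimizer=get_optimizer('Adam', clipnorm=1.0), loss='mse')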