from keras.layers import Activation, ELU, LeakyReLU, PReLU, ThresholdedReLU


def activation(layer, layer_in, layerId):
    # Map a parsed layer description to the matching Keras activation layer
    # and apply it to the incoming tensor(s) in layer_in.
    out = {}
    if (layer['info']['type'] == 'ReLU'):
        # A non-zero negative_slope means a leaky ReLU rather than a plain ReLU.
        if (layer['params']['negative_slope'] != 0):
            out[layerId] = LeakyReLU(alpha=layer['params']['negative_slope'])(*layer_in)
        else:
            out[layerId] = Activation('relu')(*layer_in)
    elif (layer['info']['type'] == 'PReLU'):
        out[layerId] = PReLU()(*layer_in)
    elif (layer['info']['type'] == 'ELU'):
        out[layerId] = ELU(alpha=layer['params']['alpha'])(*layer_in)
    elif (layer['info']['type'] == 'ThresholdedReLU'):
        out[layerId] = ThresholdedReLU(theta=layer['params']['theta'])(*layer_in)
    elif (layer['info']['type'] == 'Sigmoid'):
        out[layerId] = Activation('sigmoid')(*layer_in)
    elif (layer['info']['type'] == 'TanH'):
        out[layerId] = Activation('tanh')(*layer_in)
    elif (layer['info']['type'] == 'Softmax'):
        out[layerId] = Activation('softmax')(*layer_in)
    elif (layer['info']['type'] == 'SELU'):
        out[layerId] = Activation('selu')(*layer_in)
    elif (layer['info']['type'] == 'Softplus'):
        out[layerId] = Activation('softplus')(*layer_in)
    elif (layer['info']['type'] == 'Softsign'):
        out[layerId] = Activation('softsign')(*layer_in)
    elif (layer['info']['type'] == 'HardSigmoid'):
        out[layerId] = Activation('hard_sigmoid')(*layer_in)
    return out
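
A minimal usage sketch, assuming the layer dictionary layout the function expects (the `relu1` id, the Input shape, and the ReLU entry below are illustrative, not from the original source):

from keras.layers import Input

# Hypothetical layer description following the dict layout above.
relu_layer = {'info': {'type': 'ReLU'}, 'params': {'negative_slope': 0.0}}
x = Input(shape=(64,))                      # illustrative input tensor
out = activation(relu_layer, [x], 'relu1')  # layer_in is a list, hence the *layer_in unpacking
print(out['relu1'])                         # activated Keras tensor keyed by layerId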