from keras import regularizers
from keras.layers import Dense
from keras.models import Sequential


def make_deep_learning_classifier(hidden_layers=None, num_cols=None, optimizer='Adadelta', dropout_rate=0.2, weight_constraint=0, final_activation='sigmoid', feature_learning=False, activation='elu', kernel_initializer='normal'):
    # NOTE: dropout_rate and weight_constraint are accepted for API compatibility
    # but are not used in the architecture below
    if hidden_layers is None:
        hidden_layers = [1, 0.75, 0.25]

    # The hidden_layers passed to us simply describe a shape; they do not know the
    # num_cols we are dealing with. They are simply values like 0.5, 1, and 2, which
    # need to be multiplied by num_cols, with each layer capped at 10 neurons.
    # e.g. num_cols=30 with hidden_layers=[1, 0.75, 0.25] gives scaled_layers=[10, 10, 7]
    scaled_layers = []
    for layer in hidden_layers:
        scaled_layers.append(min(int(num_cols * layer), 10))

    # If we're training this model for feature_learning, our penultimate layer (our
    # final hidden layer before the "output" layer) always has 10 neurons, meaning
    # that our feature_learning model always outputs 10 features
    if feature_learning:
        scaled_layers.append(10)

    model = Sequential()

    model.add(Dense(scaled_layers[0], input_dim=num_cols, kernel_initializer=kernel_initializer, kernel_regularizer=regularizers.l2(0.01)))
    model.add(get_activation_layer(activation))

    for layer_size in scaled_layers[1:-1]:
        model.add(Dense(layer_size, kernel_initializer=kernel_initializer, kernel_regularizer=regularizers.l2(0.01)))
        model.add(get_activation_layer(activation))

    # There are times we will want the output from our penultimate layer, not the
    # final layer, so give it a name that makes the penultimate layer easy to find
    model.add(Dense(scaled_layers[-1], kernel_initializer=kernel_initializer, name='penultimate_layer', kernel_regularizer=regularizers.l2(0.01)))
    model.add(get_activation_layer(activation))

    model.add(Dense(1, kernel_initializer=kernel_initializer, activation=final_activation))
    model.compile(loss='binary_crossentropy', optimizer=get_optimizer(optimizer), metrics=['accuracy', 'poisson'])
    return model
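
The function above references two helpers, get_activation_layer and get_optimizer, that are defined elsewhere in the module and not shown in this snippet. A minimal sketch of what they might look like, assuming Keras 2.x; the dictionary-based dispatch here is an assumption, not the module's actual code:

from keras import optimizers
from keras.layers import Activation, ELU, LeakyReLU, PReLU, ThresholdedReLU

def get_activation_layer(activation):
    # Advanced activations are standalone layers in Keras and cannot be passed
    # as the activation= string of Dense, hence a separate layer per Dense above
    advanced = {'LeakyReLU': LeakyReLU, 'PReLU': PReLU, 'ELU': ELU, 'ThresholdedReLU': ThresholdedReLU}
    if activation in advanced:
        return advanced[activation]()
    # Plain string activations like 'elu' or 'relu' go through a regular Activation layer
    return Activation(activation)

def get_optimizer(name='Adadelta'):
    # optimizers.get resolves a string like 'Adadelta' or 'SGD' to an optimizer
    # instance with the framework's default hyperparameters
    return optimizers.get(name)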
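
To make the feature_learning path concrete, here is a usage sketch; the data, shapes, and training settings (X, y, epochs, batch_size) are invented for illustration. It builds the classifier, fits it, then reads the learned features out of the named penultimate layer via the Keras functional API, which is exactly what the name='penultimate_layer' above enables:

import numpy as np
from keras.models import Model

# Hypothetical data: 1000 rows, 30 numeric columns, binary labels
X = np.random.rand(1000, 30)
y = np.random.randint(0, 2, size=1000)

# With feature_learning=True, scaled_layers becomes [10, 10, 7, 10], so the
# named penultimate layer always has 10 neurons
model = make_deep_learning_classifier(num_cols=30, feature_learning=True)
model.fit(X, y, epochs=5, batch_size=32, verbose=0)

# Build a second model that stops at the penultimate layer and emits its
# 10 learned features instead of the final sigmoid prediction
feature_extractor = Model(inputs=model.input, outputs=model.get_layer('penultimate_layer').output)
learned_features = feature_extractor.predict(X)
print(learned_features.shape)  # (1000, 10)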