def make_deep_learning_classifier(hidden_layers=None, num_cols=None, optimizer='adam', dropout_rate=0.2, weight_constraint=0, final_activation='sigmoid', feature_learning=False):
    """Build and compile a Keras ``Sequential`` binary classifier.

    Parameters
    ----------
    hidden_layers : list of float, optional
        Shape multipliers, not absolute widths: each entry (e.g. 0.5, 1, 2)
        is multiplied by ``num_cols`` to get that hidden layer's width.
        Defaults to ``[1, 1, 1]``, or ``[1, 1, 0.5]`` when
        ``feature_learning`` is True.
    num_cols : int
        Number of input features; also scales the hidden-layer widths.
    optimizer : str or keras optimizer
        Passed straight through to ``model.compile()``.
    dropout_rate, weight_constraint :
        Accepted for interface compatibility but currently unused by this
        builder — NOTE(review): confirm whether dropout/max-norm were
        intended to be wired into the layers.
    final_activation : str
        Activation of the single-unit output layer.
    feature_learning : bool
        When True, force a 10-neuron penultimate layer so the model always
        emits exactly 10 learned features from ``'penultimate_layer'``.

    Returns
    -------
    keras.models.Sequential
        Compiled with binary_crossentropy loss and accuracy/poisson metrics.
    """
    if feature_learning and hidden_layers is None:
        hidden_layers = [1, 1, 0.5]
    if hidden_layers is None:
        hidden_layers = [1, 1, 1]

    # hidden_layers only describes a shape — values like 0.5, 1, and 2 that
    # must be multiplied by num_cols to become concrete layer widths.
    scaled_layers = [int(num_cols * layer) for layer in hidden_layers]

    # If we're training this model for feature_learning, our penultimate layer
    # (the final hidden layer before the "output" layer) always has 10
    # neurons, so the feature_learning model always outputs 10 features.
    if feature_learning:
        scaled_layers.append(10)

    model = Sequential()

    # BUG FIX: the first layer previously used hidden_layers[0] — a raw
    # multiplier such as 0.5 or 1 — instead of the scaled width. Use
    # scaled_layers[0] so the first hidden layer is sized like the rest.
    model.add(Dense(scaled_layers[0], input_dim=num_cols, kernel_initializer='normal', kernel_regularizer=regularizers.l2(0.01)))
    model.add(PReLU())

    for layer_size in scaled_layers[1:-1]:
        model.add(Dense(layer_size, kernel_initializer='normal', kernel_regularizer=regularizers.l2(0.01)))
        model.add(PReLU())

    # There are times we will want the output from the penultimate layer, not
    # the final layer, so give it a name that makes it easy to find.
    model.add(Dense(scaled_layers[-1], kernel_initializer='normal', name='penultimate_layer', kernel_regularizer=regularizers.l2(0.01)))
    model.add(PReLU())

    model.add(Dense(1, kernel_initializer='normal', activation=final_activation))
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy', 'poisson'])
    return model