def generate_model(self):
    # Input: sequences of word indices, padded/truncated to max_len
    k_inp = Input(shape=(self.max_len,), dtype='int32', name='input')
    # Embedding layer, initialized from the pre-trained embedding weights
    k_emb = Embedding(input_dim=self.max_features+3, output_dim=self.embedding_dim,
                      input_length=self.max_len, weights=self.embedding_weights)(k_inp)
    # one convolution + max-over-time pooling branch per filter size
    k_conv_list = []
    for n in self.filter_size:
        k_conv = Convolution1D(nb_filter=self.num_filters,
                               filter_length=n,
                               border_mode='valid',
                               activation='relu',
                               subsample_length=1)(k_emb)
        # pool over the whole sequence so each filter contributes one feature
        k_maxpool1d = MaxPooling1D(pool_length=self.max_len - n + 1)(k_conv)
        k_flatten = Flatten()(k_maxpool1d)
        k_conv_list.append(k_flatten)
    # concatenate the branches (a single branch needs no merge)
    if len(k_conv_list) == 1:
        k_merge = k_conv_list[0]
    else:
        k_merge = merge(k_conv_list, mode='concat', concat_axis=1)
    # add hidden layers if wanted
    last_dims = len(self.filter_size) * self.num_filters
    last_layer = k_merge
    if self.num_hidden_layers == 0:
        # put dropout right after the merge if there are no hidden layers
        last_layer = Dropout(self.dropout)(last_layer)
    for n in range(self.num_hidden_layers):
        k_dn = Dense(self.dim_hidden_layers, input_dim=last_dims, W_regularizer=l2(3))(last_layer)
        k_dp = Dropout(self.dropout)(k_dn)
        last_layer = Activation('relu')(k_dp)
        last_dims = self.dim_hidden_layers
    # single sigmoid unit for binary classification
    k_dn = Dense(1, input_dim=last_dims)(last_layer)
    k_dp = Dropout(self.dropout)(k_dn)
    k_out = Activation('sigmoid', name="output")(k_dp)
    model = Model(input=[k_inp], output=[k_out])
    model.compile(loss='binary_crossentropy',
                  optimizer=self.optimizer,
                  # metrics=['accuracy', num_true, target_tp_t, f1_score, precision, recall, specificity, spec_at_sens2, y_sum, y_ones, y_zeros, y_element,
                  #          yp_sum, yp_mean, yp_element])
                  # metrics=['accuracy', f1_score, precision, recall, specificity, specificity_at_recall, discriminance])
                  metrics=['accuracy'])
    return model
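
The method relies on the Keras 1.x functional API (merge, nb_filter, border_mode, W_regularizer). A minimal sketch of the module-level imports it assumes, valid for a Keras 1.x installation, is:

# Module-level imports for the Keras 1.x API used above (assumes Keras 1.x)
from keras.models import Model
from keras.layers import (Input, Embedding, Convolution1D, MaxPooling1D,
                          Flatten, Dense, Dropout, Activation, merge)
from keras.regularizers import l2

Under Keras 2 the same graph would be expressed with renamed arguments (filters, kernel_size, padding, strides, pool_size, kernel_regularizer), concatenate() instead of merge(), and Model(inputs=..., outputs=...).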