def _build_network(self, vocab_size, maxlen, emb_weights=None, hidden_units=256, trainable=False):
    """Build and compile the TimeDistributed-LSTM sarcasm classifier.

    Parameters
    ----------
    vocab_size : int
        Vocabulary size for the embedding layer.
    maxlen : int
        Fixed input sequence length (number of tokens per sample).
    emb_weights : 2-D array, required
        Pre-trained embedding matrix of shape (vocab_size, emb_dim).
        NOTE(review): the old default of ``[]`` was a mutable default
        argument and always crashed on ``.shape[1]`` (lists have no
        ``shape``); we now fail fast with a clear error instead.
    hidden_units : int
        Number of units in each LSTM layer.
    trainable : bool
        Whether the embedding weights are fine-tuned during training.

    Returns
    -------
    A compiled Keras ``Sequential`` model ending in a 2-way softmax,
    optimized with Adam and categorical cross-entropy.
    """
    if emb_weights is None or not hasattr(emb_weights, 'shape'):
        raise ValueError('emb_weights must be a 2-D array of pre-trained '
                         'embeddings with a .shape attribute')
    print('Build model...')
    emb_dim = emb_weights.shape[1]  # hoisted: used twice below

    model = Sequential()
    model.add(Embedding(vocab_size, emb_dim, input_length=maxlen,
                        weights=[emb_weights], trainable=trainable))
    # Reshape to (maxlen, emb_dim, 1) so TimeDistributed applies an LSTM
    # across the embedding dimension of each timestep independently.
    model.add(Reshape((maxlen, emb_dim, 1)))
    model.add(BatchNormalization(momentum=0.9))

    model.add(TimeDistributed(LSTM(hidden_units, kernel_initializer='he_normal',
                                   activation='sigmoid', dropout=0.5,
                                   return_sequences=True)))
    model.add(TimeDistributed(LSTM(hidden_units, kernel_initializer='he_normal',
                                   activation='sigmoid', dropout=0.5)))
    model.add(Flatten())
    model.add(Dense(2, activation='softmax'))

    adam = Adam(lr=0.0001)
    model.compile(loss='categorical_crossentropy', optimizer=adam,
                  metrics=['accuracy'])
    print('No of parameter:', model.count_params())
    print(model.summary())
    return model
Source file: sarcasm_detection_model_CNN_LSTM_DNN_2D.py
Language: python
Reads: 31
Bookmarks: 0
Likes: 0
Comments: 0
Comment list
Table of contents