# Imports required by this excerpt (Keras 2.x API; Convolution1D is an alias for Conv1D)
from keras.models import Sequential
from keras.layers import Embedding, Convolution1D, Dropout, LSTM, Dense
from keras.optimizers import Adam


def _build_network(self, vocab_size, maxlen, emb_weights=[], hidden_units=256, trainable=False):
    print('Build model...')
    model = Sequential()
    # Embedding layer initialized with pre-trained word2vec weights; frozen unless trainable=True
    model.add(Embedding(vocab_size, emb_weights.shape[1], input_length=maxlen, weights=[emb_weights],
                        trainable=trainable))
    # model.add(Reshape((maxlen, emb_weights.shape[1], 1)))
    # Two stacked 1D convolutions over the embedded sequence; input shapes are
    # inferred from the previous layer, so no input_shape argument is needed here
    model.add(Convolution1D(emb_weights.shape[1], 3, kernel_initializer='he_normal', padding='valid',
                            activation='sigmoid'))
    # model.add(MaxPooling1D(pool_size=3))
    model.add(Convolution1D(emb_weights.shape[1], 3, kernel_initializer='he_normal', padding='valid',
                            activation='sigmoid'))
    # model.add(MaxPooling1D(pool_size=3))
    model.add(Dropout(0.25))
    # Two stacked LSTMs; the first returns the full sequence so the second can consume it
    model.add(LSTM(hidden_units, kernel_initializer='he_normal', activation='sigmoid', dropout=0.5,
                   return_sequences=True))
    model.add(LSTM(hidden_units, kernel_initializer='he_normal', activation='sigmoid', dropout=0.5))
    # Fully connected layer followed by a 2-way softmax (sarcastic vs. non-sarcastic)
    model.add(Dense(hidden_units, kernel_initializer='he_normal', activation='sigmoid'))
    model.add(Dense(2, activation='softmax'))
    adam = Adam(lr=0.0001)
    model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
    print('Number of parameters:', model.count_params())
    print(model.summary())
    return model
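
For context, a minimal usage sketch (not part of the original file): it assumes an instance `detector` of the surrounding model class, a stand-in random matrix in place of real word2vec vectors, and dummy padded token sequences; all of these names and values are placeholders.

# Usage sketch with assumed placeholder data (random arrays instead of word2vec
# vectors and real tokenized text); `detector` is a hypothetical instance of the
# class that owns _build_network.
import numpy as np

vocab_size, embedding_dim, maxlen = 20000, 300, 30
emb_weights = np.random.rand(vocab_size, embedding_dim).astype('float32')  # stand-in for word2vec weights
X = np.random.randint(0, vocab_size, size=(64, maxlen))                    # dummy padded token ids
y = np.eye(2)[np.random.randint(0, 2, size=64)]                            # one-hot binary labels

model = detector._build_network(vocab_size, maxlen, emb_weights=emb_weights,
                                hidden_units=256, trainable=False)
model.fit(X, y, batch_size=32, epochs=1, validation_split=0.1)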
Source file: sarcasm_detection_model_CNN_LSTM_DNN_word2vec.py (Python)