import numpy as np
from keras.layers import Embedding

def _make_embedding_layer(self, word_index):
    embeddings = self._get_embeddings()
    # Word indices are typically 1-based (e.g. from the Keras Tokenizer),
    # so the matrix needs len(word_index) + 1 rows when the vocabulary is
    # smaller than the cap; row 0 stays all-zero for padding.
    nb_words = min(self.max_nr_words, len(word_index) + 1)
    embedding_matrix = np.zeros((nb_words, self.embedding_dim))
    for word, i in word_index.items():
        # Skip words whose index does not fit in the matrix.
        if i >= nb_words:
            continue
        # Words without a pretrained vector keep their all-zero row.
        embedding_vector = embeddings.get(word)
        if embedding_vector is not None:
            embedding_matrix[i] = embedding_vector
    # trainable=False freezes the pretrained vectors during training.
    embedding_layer = Embedding(nb_words, self.embedding_dim,
                                weights=[embedding_matrix],
                                input_length=self.sequence_length,
                                trainable=False)
    return embedding_layer
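
For context, here is a minimal sketch of how this method might be driven end to end. The TextClassifier wrapper class, the _get_embeddings loader, the file path glove.6B.100d.txt, and the sample texts are all illustrative assumptions; only the three attributes the method reads (max_nr_words, embedding_dim, sequence_length) and the word_index argument come from the snippet above.

from keras.preprocessing.text import Tokenizer

class TextClassifier:
    # Hypothetical wrapper holding the attributes the method reads.
    def __init__(self, max_nr_words=20000, embedding_dim=100,
                 sequence_length=200):
        self.max_nr_words = max_nr_words
        self.embedding_dim = embedding_dim
        self.sequence_length = sequence_length

    def _get_embeddings(self):
        # Assumed loader: parse a GloVe-style text file into a
        # dict mapping word -> np.ndarray of shape (embedding_dim,).
        embeddings = {}
        with open('glove.6B.100d.txt', encoding='utf-8') as f:
            for line in f:
                parts = line.rstrip().split(' ')
                embeddings[parts[0]] = np.asarray(parts[1:], dtype='float32')
        return embeddings

# Attach the method defined above to the hypothetical wrapper class.
TextClassifier._make_embedding_layer = _make_embedding_layer

# word_index comes from a Tokenizer fitted on the training corpus.
tokenizer = Tokenizer(num_words=20000)
tokenizer.fit_on_texts(['first sample document', 'second sample document'])
layer = TextClassifier()._make_embedding_layer(tokenizer.word_index)

With trainable=False the pretrained vectors act as fixed features; setting it to True instead would fine-tune them along with the rest of the network.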