def fit_embeddings(self, documents):
    """
    Train the word embeddings of the classification model on *documents*,
    reusing this model's classification hyper-parameters for a Gensim
    ``Word2Vec`` run — the effect is the same as supplying a pre-trained model.

    :param documents: iterable of tokenized sentences to train embeddings on
    """
    params = self.get_params()
    # fastText-specific options have no Word2Vec counterpart
    for key in ('pre_trained', 'bucket'):
        del params[key]
    # Gensim's Word2Vec does not support plain softmax; fall back to
    # hierarchical softmax instead.
    if params['loss'] == 'softmax':
        params['loss'] = 'hs'
    LabeledWord2Vec.init_loss(LabeledWord2Vec(), params, params['loss'])
    params.pop('loss')
    w2v_model = Word2Vec(sentences=documents, **params)
    self._classifier = LabeledWord2Vec.load_from(w2v_model)