def _get_embedding_layer(self, embedding_file=None):
    """Lazily construct and cache the word-embedding layer.

    On the first call, builds a Keras ``Embedding`` over the non-onto-aware
    vocabulary and stores it on ``self.embedding_layer``; subsequent calls
    return the cached layer.

    Args:
        embedding_file: optional path to a pretrained embedding file. When
            given, the matrix from ``self.data_processor`` seeds the layer's
            initial weights. When absent and ``self.tune_embedding`` is
            False, tuning is forcibly enabled (a frozen random embedding
            would be useless) and a warning goes to stderr.

    Returns:
        The cached ``Embedding`` layer (mask_zero=True, named "embedding").
    """
    if self.embedding_layer is not None:
        return self.embedding_layer
    if embedding_file is not None:
        # Keras takes initial weights as a list of arrays, hence the wrapping.
        initial_weights = [self.data_processor.get_embedding_matrix(embedding_file, onto_aware=False)]
    else:
        initial_weights = None
        if not self.tune_embedding:
            # No pretrained weights to freeze; force tuning or the layer stays random.
            print >>sys.stderr, "Pretrained embedding is not given. Setting tune_embedding to True."
            self.tune_embedding = True
    num_words = self.data_processor.get_vocab_size(onto_aware=False)
    self.embedding_layer = Embedding(input_dim=num_words, output_dim=self.embed_dim,
                                     weights=initial_weights, trainable=self.tune_embedding,
                                     mask_zero=True, name="embedding")
    return self.embedding_layer