def _embed_sequence_with_length(self, embeddings, input_text):
    """Embed token ids and return the sequence lengths needed for masking.

    Args:
        embeddings: embedding matrix to look rows up in.
        input_text: integer token-id tensor where id 0 marks padding;
            assumed shape (batch, num_sentences, num_words) — TODO confirm
            against the caller.

    Returns:
        A tuple ``(embedded_sequence, sentences_length, words_length)``:
        the embedded tokens, the number of non-empty sentences per example,
        and the number of non-padding words per sentence.
    """
    # A word is real iff its id is nonzero; count real words along the last axis.
    word_mask = input_text > 0
    words_length = tf.reduce_sum(tf.cast(word_mask, tf.int32), -1)

    # A sentence is real iff it contains at least one real word.
    sentence_mask = words_length > 0
    sentences_length = tf.reduce_sum(tf.cast(sentence_mask, tf.int32), 1)

    # Shift ids by +1 before the lookup — presumably row 0 of `embeddings`
    # is reserved (e.g. for padding); verify against how the table is built.
    shifted_ids = input_text + 1
    embedded_sequence = tf.nn.embedding_lookup(embeddings, shifted_ids)

    return embedded_sequence, sentences_length, words_length
# Source: text_classification_model_han.py
# (scraped page metadata: views 32, favorites 0, likes 0, comments 0)