def _han(self, input_words, embeddings, gene, variation, batch_size, embeddings_size,
         num_hidden, dropout, word_output_size, sentence_output_size, training=True):
    """Build a Hierarchical Attention Network (HAN) over a batch of documents.

    The document is encoded in two stages: a word-level bidirectional RNN with
    attention produces one vector per sentence, then a sentence-level
    bidirectional RNN with attention produces one vector per document.
    Dropout is applied after each attention stage.

    Args:
        input_words: word-id tensor, reshaped here to
            [batch_size, MAX_SENTENCES, MAX_WORDS_IN_SENTENCE].
        embeddings: embedding matrix handed to `_embed_sequence_with_length`.
        gene, variation: extra conditioning tensors forwarded to `_attention`
            (semantics defined by that helper — not visible here).
        batch_size: number of documents per batch.
        embeddings_size: dimensionality of a single word embedding.
        num_hidden: hidden units per RNN direction (bi-RNN output is 2x this).
        dropout: NOTE(review): passed to `layers.dropout` as `keep_prob`, i.e.
            the probability of KEEPING a unit — the parameter name suggests the
            opposite. Behavior kept as-is; confirm callers pass keep probability.
        word_output_size: size of the word-level attention output.
        sentence_output_size: size of the sentence-level attention output.
        training: when False, dropout becomes a no-op.

    Returns:
        Tensor of shape [batch_size, sentence_output_size]: one document vector
        per example.
    """
    # Fix the layout: every document is padded upstream to MAX_SENTENCES
    # sentences of MAX_WORDS_IN_SENTENCE word ids each.
    input_words = tf.reshape(input_words,
                             [batch_size, MAX_SENTENCES, MAX_WORDS_IN_SENTENCE])
    embedded_sequence, sentences_length, words_length = \
        self._embed_sequence_with_length(embeddings, input_words)
    # Dynamic sizes of the embedded tensor: [batch, sentences, words, emb].
    _, sentence_size, word_size, _ = tf.unstack(tf.shape(embedded_sequence))

    # Word level: treat every sentence of every document as an independent
    # sequence, so the RNN runs over batch_size * sentence_size sequences.
    with tf.variable_scope('word_level'):
        word_level_inputs = tf.reshape(
            embedded_sequence,
            [batch_size * sentence_size, word_size, embeddings_size])
        word_level_lengths = tf.reshape(words_length,
                                        [batch_size * sentence_size])
        word_level_output = self._bidirectional_rnn(
            word_level_inputs, word_level_lengths, num_hidden)
        # Restore the per-document layout; last dim is 2*num_hidden because
        # forward and backward RNN states are concatenated.
        word_level_output = tf.reshape(
            word_level_output,
            [batch_size, sentence_size, word_size, num_hidden * 2])
        # Attention collapses the word axis into one vector per sentence.
        word_level_output = self._attention(word_level_output, word_output_size,
                                            gene, variation)
        word_level_output = layers.dropout(word_level_output, keep_prob=dropout,
                                           is_training=training)

    # Sentence level: run a second bi-RNN + attention over sentence vectors
    # to produce a single vector per document.
    with tf.variable_scope('sentence_level'):
        sentence_level_inputs = tf.reshape(
            word_level_output, [batch_size, sentence_size, word_output_size])
        sentence_level_output = self._bidirectional_rnn(
            sentence_level_inputs, sentences_length, num_hidden)
        sentence_level_output = self._attention(
            sentence_level_output, sentence_output_size, gene, variation)
        sentence_level_output = layers.dropout(
            sentence_level_output, keep_prob=dropout, is_training=training)

    return sentence_level_output
# Source file: text_classification_model_han.py
# (Blog-platform page metadata — view/favorite/like/comment counters and
# navigation labels — removed; it was scraping residue, not code.)