def __init__(self, rnn_class=LSTM, encoder_dims=50, bidirectional=True, dropout_rate=0.5, **rnn_kwargs):
"""Creates an RNN model with attention. The attention mechanism is implemented as described
in https://www.cs.cmu.edu/~hovy/papers/16HLT-hierarchical-attention-networks.pdf, but without
sentence level attention.
Args:
rnn_class: The type of RNN to use. (Default Value = LSTM)
encoder_dims: The number of hidden units of RNN. (Default Value: 50)
bidirectional: Whether to use bidirectional encoding. (Default Value = True)
**rnn_kwargs: Additional args for building the RNN.
"""
    super(AttentionRNN, self).__init__(dropout_rate)
    self.rnn_class = rnn_class
    self.encoder_dims = encoder_dims
    self.bidirectional = bidirectional
    self.rnn_kwargs = rnn_kwargs
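
# --- Illustration, not part of the library ---
# A minimal NumPy sketch of the word-level attention described in the HAN
# paper linked in the docstring above: each encoder hidden state is projected
# through a one-layer MLP, scored against a learned context vector u_w, and
# the softmax-normalized scores weight a sum over timesteps. The function and
# variable names here are hypothetical, chosen for the sketch only.
import numpy as np

def word_attention(H, W, b, u_w):
    """H: (timesteps, hidden); W: (hidden, hidden); b and u_w: (hidden,)."""
    U = np.tanh(H @ W + b)                 # u_t = tanh(W h_t + b)
    scores = U @ u_w                       # alignment with the context vector
    alpha = np.exp(scores - scores.max())  # numerically stable softmax...
    alpha /= alpha.sum()                   # ...over the timestep axis
    return alpha @ H                       # attention-weighted sentence vector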