def __init__(self, input_size, hidden_size, num_layers,
             dropout_rate=0, dropout_output=False, rnn_type=nn.LSTM,
             concat_layers=False, padding=False):
    """Build a stack of bidirectional SRU layers.

    Args:
        input_size: feature size of the input fed to the first layer.
        hidden_size: hidden units per direction in every layer.
        num_layers: number of stacked bidirectional layers.
        dropout_rate: dropout probability; passed to each SRUCell as
            both ``dropout`` and ``rnn_dropout``.
        dropout_output: stored on the instance; presumably controls
            dropout on the final output in the forward pass — confirm
            against the rest of the class.
        rnn_type: kept for backward compatibility with callers but
            currently UNUSED — every layer is an ``MF.SRUCell``
            regardless of this value (the nn.LSTM path is gone).
        concat_layers: stored on the instance; presumably makes the
            forward pass concatenate all layer outputs — confirm.
        padding: stored on the instance; presumably toggles padded
            (pack/pad) sequence handling in forward — confirm.
    """
    super(StackedBRNN, self).__init__()
    self.padding = padding
    self.dropout_output = dropout_output
    self.dropout_rate = dropout_rate
    self.num_layers = num_layers
    self.concat_layers = concat_layers
    self.rnns = nn.ModuleList()
    for i in range(num_layers):
        # Layer 0 consumes the raw input; every later layer consumes the
        # bidirectional output of the previous one (2 * hidden_size).
        # Use a local instead of rebinding the input_size parameter.
        layer_input_size = input_size if i == 0 else 2 * hidden_size
        self.rnns.append(MF.SRUCell(layer_input_size, hidden_size,
                                    dropout=dropout_rate,
                                    rnn_dropout=dropout_rate,
                                    use_tanh=1,
                                    bidirectional=True))