def __init__(self, vocab_size, ndim_embedding, num_layers, ndim_h, kernel_size=4, pooling="fo", zoneout=0, dropout=0, weightnorm=False, wgain=1, densely_connected=False, ignore_label=None):
    """Build a stacked-QRNN language model.

    Registers an embedding layer, ``num_layers`` QRNN layers (``qrnn0`` ..
    ``qrnn{num_layers-1}``), and a 1x1 convolution ``fc`` projecting hidden
    states back to vocabulary logits.

    Args:
        vocab_size (int): Number of distinct tokens.
        ndim_embedding (int): Embedding dimensionality.
        num_layers (int): Number of stacked QRNN layers; must be positive.
        ndim_h (int): Hidden-state dimensionality of each QRNN layer.
        kernel_size (int): Convolution width inside each QRNN.
        pooling (str): QRNN pooling mode (e.g. "f", "fo", "ifo").
        zoneout (float): Zoneout rate passed to each QRNN layer.
        dropout (float): Dropout rate; dropout is considered enabled iff > 0.
        weightnorm (bool): Whether to apply weight normalization.
        wgain (float): Gain used for weight initialization scale.
        densely_connected (bool): If True, each layer's input is the
            concatenation of all previous layers' outputs.
        ignore_label (int or None): Embedding index to ignore (e.g. padding).
    """
    # Validate BEFORE num_layers is used below to size the output projection.
    assert num_layers > 0
    super(RNNModel, self).__init__(
        embed=L.EmbedID(vocab_size, ndim_embedding, ignore_label=ignore_label),
        # With dense connections the final feature is the concatenation of all
        # layers' outputs, so the 1x1 projection consumes ndim_h * num_layers
        # channels; otherwise just ndim_h.
        fc=L.Convolution1D(ndim_h * num_layers if densely_connected else ndim_h, vocab_size, ksize=1, stride=1, pad=0, weightnorm=weightnorm, initialW=initializers.Normal(math.sqrt(wgain / ndim_h)))
    )
    self.vocab_size = vocab_size
    self.ndim_embedding = ndim_embedding
    self.num_layers = num_layers
    self.ndim_h = ndim_h
    self.kernel_size = kernel_size
    self.pooling = pooling
    self.zoneout = zoneout
    self.weightnorm = weightnorm
    # `dropout > 0` is already a bool; no conditional expression needed.
    self.using_dropout = dropout > 0
    self.dropout = dropout
    self.wgain = wgain
    self.ignore_label = ignore_label
    self.densely_connected = densely_connected
    with self.init_scope():
        # First layer consumes the embedding directly.
        setattr(self, "qrnn0", L.QRNN(ndim_embedding, ndim_h, kernel_size=kernel_size, pooling=pooling, zoneout=zoneout, weightnorm=weightnorm, wgain=wgain))
        # Layer i consumes ndim_h * i channels when densely connected
        # (concatenation of the i previous layers' outputs), else ndim_h.
        for i in range(1, num_layers):
            setattr(self, "qrnn{}".format(i), L.QRNN(ndim_h * i if densely_connected else ndim_h, ndim_h, kernel_size=kernel_size, pooling=pooling, zoneout=zoneout, weightnorm=weightnorm, wgain=wgain))