def __init__(self, in_channels, out_channels, kernel_size=2, zoneout=False, wgain=1., weightnorm=False):
    """Set up the attentive decoder QRNN layer.

    Delegates to the base QRNN constructor with "fo" pooling, then adds a
    linear output projection ``o`` mapping 2 * out_channels features down to
    out_channels — presumably the concatenation of an attention context
    vector with the decoder hidden state (TODO: confirm against __call__).

    Args:
        in_channels: size of each input feature vector.
        out_channels: size of each output feature vector.
        kernel_size: width of the QRNN convolution window.
        zoneout: whether to apply zoneout regularization (passed to base).
        wgain: gain factor for the weight-initialization scale.
        weightnorm: whether to apply weight normalization (passed to base).
    """
    super(QRNNGlobalAttentiveDecoder, self).__init__(
        in_channels, out_channels, kernel_size, "fo", zoneout, wgain, weightnorm)
    # Normal-init std follows a fan-in heuristic: sqrt(gain / (in_channels * kernel_size)).
    weight_std = math.sqrt(wgain / in_channels / kernel_size)
    with self.init_scope():
        # Registered inside init_scope so Chainer tracks it as a trainable link.
        self.o = links.Linear(
            2 * out_channels, out_channels,
            initialW=initializers.Normal(weight_std))
# X is the input of the decoder
# ht_enc is the last encoder state
# H_enc is the encoder's last layer's hidden states
# NOTE(review): web-scrape residue removed here — "评论列表" ("comment list") and
# "文章目录" ("article table of contents") were blog-page navigation text, not code.