import chainer
import chainer.functions as F

class ThreeLayersMLP(chainer.Chain):
    # NNBlock is a project-local helper (defined elsewhere in this repo) that
    # bundles a layer (nn='linear' here) with optional normalization and activation.
    def __init__(self, hidden_size=768, output_size=1, use_bn=True):
        if use_bn:
            norm = 'bn'    # batch-normalize the hidden layers
            w_init = None  # default weight initialization
        else:
            norm = None
            w_init = None
            # w_init = chainer.initializers.HeNormal()  # alternative: He initialization
        super(ThreeLayersMLP, self).__init__(
            # l0's input size is None, so Chainer infers it on the first forward pass.
            l0=NNBlock(None, hidden_size, norm=norm, nn='linear', w_init=w_init, activation=F.leaky_relu),
            l1=NNBlock(hidden_size, hidden_size, norm=norm, nn='linear', w_init=w_init, activation=F.leaky_relu),
            # Output layer: no normalization, no activation; emits raw scores.
            l2=NNBlock(hidden_size, output_size, norm=None, activation=None, nn='linear', w_init=w_init),
        )
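
For reference, a minimal usage sketch follows. It assumes NNBlock instances are callable chains (consistent with how they are constructed above) and that ThreeLayersMLP's forward pass, defined elsewhere in the repo, simply applies l0, l1, l2 in order; the batch size and feature count are illustrative.

import numpy as np

mlp = ThreeLayersMLP(hidden_size=768, output_size=1, use_bn=True)
x = np.random.randn(32, 128).astype(np.float32)  # batch of 32 samples, 128 features

# Apply the three blocks in order (assumed forward pass).
h = mlp.l0(x)   # (32, 768): linear (input size inferred as 128) + BN + leaky ReLU
h = mlp.l1(h)   # (32, 768): linear + BN + leaky ReLU
y = mlp.l2(h)   # (32, 1): linear only, raw output score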