def __init__(self, in_ch=3, base_size=128, down_layers=4, use_bn=True, w_init=None, output_len=38):
    layers = {}
    self.down_layers = down_layers
    # Batch normalization is optional; note the first conv block below never uses it.
    if use_bn:
        norm = 'bn'
    else:
        norm = None
    act = F.leaky_relu
    if w_init is None:
        w_init = chainer.initializers.Normal(0.02)
    # First downsampling convolution: in_ch -> base_size channels, no normalization.
    layers['c_first'] = NNBlock(in_ch, base_size, nn='down_conv', norm=None, activation=act, w_init=w_init)
    base = base_size
    # Remaining downsampling convolutions, doubling the channel count at each layer.
    for i in range(down_layers - 1):
        layers['c' + str(i)] = NNBlock(base, base * 2, nn='down_conv', norm=norm, activation=act, w_init=w_init)
        base *= 2
    # Two output heads: a single real/fake logit and an output_len-dimensional class prediction.
    layers['c_last_0'] = NNBlock(None, 1, nn='linear', norm=None, activation=None, w_init=w_init)
    layers['c_last_1_0'] = NNBlock(None, output_len, nn='linear', norm=None, activation=None, w_init=None)
    #layers['c_last_1_1'] = NNBlock(1024, 1024, nn='linear', norm=None, activation=F.leaky_relu, w_init=None)
    #layers['c_last_1_2'] = NNBlock(1024, output_len, nn='linear', norm=None, activation=None, w_init=None)
    super(ACGANDiscriminator, self).__init__(**layers)
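
# A minimal sketch of a matching forward pass (the original snippet only shows __init__):
# it assumes NNBlock instances are callable Chainer links, as is conventional for such
# blocks. The method name and variable names here are illustrative, not from the source.
def __call__(self, x):
    h = self.c_first(x)
    for i in range(self.down_layers - 1):
        h = getattr(self, 'c' + str(i))(h)
    # ACGAN-style outputs: adversarial real/fake logit plus auxiliary class logits.
    out_adv = self.c_last_0(h)
    out_cls = self.c_last_1_0(h)
    return out_adv, out_cls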