import chainer
import chainer.functions as F
import chainer.links as L


class CBR(chainer.Chain):
    """Convolution -> (optional) BatchNormalization -> activation block."""

    def __init__(self, ch0, ch1, bn=True, sample='down', activation=F.relu, dropout=False, noise=False):
        self.bn = bn
        self.activation = activation
        self.dropout = dropout
        self.sample = sample
        self.noise = noise
        layers = {}
        w = chainer.initializers.Normal(0.02)
        if sample == 'down':
            # 4x4 convolution, stride 2, padding 1: halves the spatial resolution.
            layers['c'] = L.Convolution2D(ch0, ch1, 4, 2, 1, initialW=w)
        elif sample == 'none-9':
            # 9x9 convolution, stride 1, padding 4: keeps the spatial resolution.
            layers['c'] = L.Convolution2D(ch0, ch1, 9, 1, 4, initialW=w)
        elif sample == 'none-7':
            # 7x7 convolution, stride 1, padding 3: keeps the spatial resolution.
            layers['c'] = L.Convolution2D(ch0, ch1, 7, 1, 3, initialW=w)
        elif sample == 'none-5':
            # 5x5 convolution, stride 1, padding 2: keeps the spatial resolution.
            layers['c'] = L.Convolution2D(ch0, ch1, 5, 1, 2, initialW=w)
        else:
            # Default ('none'): 3x3 convolution, stride 1, padding 1.
            layers['c'] = L.Convolution2D(ch0, ch1, 3, 1, 1, initialW=w)
        if bn:
            if self.noise:
                # When noise injection is used, the learnable scale (gamma) is dropped.
                layers['batchnorm'] = L.BatchNormalization(ch1, use_gamma=False)
            else:
                layers['batchnorm'] = L.BatchNormalization(ch1)
        super(CBR, self).__init__(**layers)
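For context, here is a minimal usage sketch. It assumes the reconstructed CBR class above and standard Chainer imports; the forward pass (`__call__`) of CBR is not shown in this excerpt, so only the registered convolution link is invoked directly.

# Minimal usage sketch (assumption: CBR is the chainer.Chain shown above;
# its __call__/forward logic is defined elsewhere in the original source).
import numpy as np
import chainer.functions as F

# Downsampling block: 4x4 conv with stride 2 halves the spatial size.
down = CBR(64, 128, bn=True, sample='down', activation=F.leaky_relu)

# Resolution-preserving block: falls through to the 3x3, stride-1 branch.
same = CBR(128, 128, bn=True, sample='none', activation=F.relu)

x = np.zeros((1, 64, 32, 32), dtype=np.float32)
h = down.c(x)      # call the registered convolution link directly
print(h.shape)     # (1, 128, 16, 16)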