def __call__(self, x):
    """Discriminator forward pass with noise injected after every stage.

    Args:
        x: Input batch.

    Returns:
        Raw (un-squashed) output of the final layer ``l4``.
    """
    out = add_noise(x)
    out = F.leaky_relu(add_noise(self.c0_0(out)))
    # Remaining stages: conv -> batch norm -> noise -> leaky ReLU, in order.
    stages = ((self.c0_1, self.bn0_1), (self.c1_0, self.bn1_0),
              (self.c1_1, self.bn1_1), (self.c2_0, self.bn2_0),
              (self.c2_1, self.bn2_1), (self.c3_0, self.bn3_0))
    for conv, norm in stages:
        out = F.leaky_relu(add_noise(norm(conv(out))))
    return self.l4(out)
# Python examples of leaky_relu() usage (collected snippets)
def __call__(self, x, test=False):
    """Run the configurable-depth discriminator and squash to (0, 1).

    Args:
        x: Input batch.
        test: Forwarded to every batch-norm link (old-Chainer test mode).

    Returns:
        Sigmoid-activated output of the final convolution ``self.c``.
    """
    feat = F.leaky_relu(self.c0(x))
    # Stages 1..n_layers inclusive: conv 'c{i}' then norm 'b{i}'.
    for i in range(1, self.n_layers + 1):
        conv = self['c{}'.format(i)]
        norm = self['b{}'.format(i)]
        feat = F.leaky_relu(norm(conv(feat), test=test))
    return F.sigmoid(self.c(feat))
def __init__(self, in_ch):
    """Build the 8-stage down-sampling encoder.

    Args:
        in_ch: Number of channels of the input image.
    """
    init_w = chainer.initializers.Normal(0.02)
    links = {'c0': L.Convolution2D(in_ch, 64, 3, 1, 1, initialW=init_w)}
    # Channel plan for the down-sampling CBR stages c1..c7.
    plan = [(64, 128), (128, 256), (256, 512), (512, 512),
            (512, 512), (512, 512), (512, 512)]
    for idx, (cin, cout) in enumerate(plan, start=1):
        links['c%d' % idx] = CBR(cin, cout, bn=True, sample='down',
                                 activation=F.leaky_relu, dropout=False)
    super(Encoder, self).__init__(**links)
def __call__(self, x):
    """Return the feature maps produced by every encoder stage c0..c7."""
    features = [F.leaky_relu(self.c0(x))]
    for stage in range(1, 8):
        previous = features[stage - 1]
        features.append(self['c%d' % stage](previous))
    return features
def __init__(self, in_ch, out_ch):
    """Build a two-stream patch discriminator.

    Args:
        in_ch: Channels of the first input stream (feeds c0_0).
        out_ch: Channels of the second input stream (feeds c0_1).
    """
    init_w = chainer.initializers.Normal(0.02)
    # c1 takes 64 channels: the two 32-channel streams are presumably
    # concatenated by the caller before c1 — confirm in __call__.
    links = {
        'c0_0': CBR(in_ch, 32, bn=False, sample='down',
                    activation=F.leaky_relu, dropout=False),
        'c0_1': CBR(out_ch, 32, bn=False, sample='down',
                    activation=F.leaky_relu, dropout=False),
        'c1': CBR(64, 128, bn=True, sample='down',
                  activation=F.leaky_relu, dropout=False),
        'c2': CBR(128, 256, bn=True, sample='down',
                  activation=F.leaky_relu, dropout=False),
        'c3': CBR(256, 512, bn=True, sample='down',
                  activation=F.leaky_relu, dropout=False),
        'c4': L.Convolution2D(512, 1, 3, 1, 1, initialW=init_w),
    }
    super(Discriminator, self).__init__(**links)
def __call__(self, x):
    """Discriminator forward pass (no noise variant).

    Returns:
        Raw output of the final layer ``l4``.
    """
    out = F.leaky_relu(self.c0_0(x))
    # Each remaining stage: conv -> batch norm -> leaky ReLU.
    stages = ((self.c0_1, self.bn0_1), (self.c1_0, self.bn1_0),
              (self.c1_1, self.bn1_1), (self.c2_0, self.bn2_0),
              (self.c2_1, self.bn2_1), (self.c3_0, self.bn3_0))
    for conv, norm in stages:
        out = F.leaky_relu(norm(conv(out)))
    return self.l4(out)
def __call__(self, x):
    """Encode ``x``, decode it back, and score the reconstruction.

    Returns:
        Mean absolute error between the tanh reconstruction and the input.
    """
    act = F.leaky_relu
    h = act(self.c0(x))
    h = act(self.c1(h))
    h = act(self.c2(h))
    h = act(self.c3(h))
    h = act(self.l4(h))
    # Reshape the fully-connected output back to a (batch, ch, 4, 4)
    # spatial tensor before the deconvolution stack.
    batch = x.data.shape[0]
    h = F.reshape(act(self.l5(h)), (batch, self.ch, 4, 4))
    h = act(self.dc3(h))
    h = act(self.dc2(h))
    h = act(self.dc1(h))
    reconstruction = F.tanh(self.dc0(h))
    return F.mean_absolute_error(reconstruction, x)
def __call__(self, x):
    """Forward through four hidden layers to the linear output ``l5``.

    NOTE: batch norm is applied AFTER the leaky ReLU in this network.
    """
    test_mode = not self.train
    h = x
    for linear, norm in ((self.l1, self.bn1), (self.l2, self.bn2),
                         (self.l3, self.bn3), (self.l4, self.bn4)):
        h = norm(F.leaky_relu(linear(h)), test=test_mode)
    return self.l5(h)
def __init__(self, in_ch):
    """Construct the eight down-sampling stages of the encoder.

    Args:
        in_ch: Channel count of the input image.
    """
    weight_init = chainer.initializers.Normal(0.02)
    links = {'c0': L.Convolution2D(in_ch, 64, 3, 1, 1, initialW=weight_init)}
    # Output-channel sequence for c1..c7; input is the previous output.
    out_channels = (128, 256, 512, 512, 512, 512, 512)
    prev = 64
    for idx, cout in enumerate(out_channels, start=1):
        links['c%d' % idx] = CBR(prev, cout, bn=True, sample='down',
                                 activation=F.leaky_relu, dropout=False)
        prev = cout
    super(Encoder, self).__init__(**links)
def __call__(self, x):
    """Collect the intermediate activations of all encoder stages."""
    outputs = [F.leaky_relu(self.c0(x))]
    for idx in range(1, 8):
        layer = self['c%d' % idx]
        outputs.append(layer(outputs[-1]))
    return outputs
def __init__(self, in_ch, out_ch, will_concat=True, layers=None):
    """Build a two-stream discriminator with optional stream concatenation.

    Args:
        in_ch: Channels of the first input stream (feeds c0_0).
        out_ch: Channels of the second input stream (feeds c0_1).
        will_concat: If True the two 32-channel streams are concatenated
            before c1 (so c1 sees 64 channels); otherwise c1 sees 32.
        layers: Optional dict of extra links to register alongside the
            ones built here; a fresh dict is created when omitted.
    """
    # Fix: the original signature used a mutable default (layers={}).
    # That dict is shared across ALL constructions, so links registered by
    # one instantiation leaked into the next. Use the None sentinel instead.
    if layers is None:
        layers = {}
    self.will_concat = will_concat
    channel_expansion = 2 if will_concat else 1
    w = chainer.initializers.Normal(0.02)
    layers['c0_0'] = CBR(in_ch, 32, bn=False, sample='down',
                         activation=F.leaky_relu, dropout=False)
    layers['c0_1'] = CBR(out_ch, 32, bn=False, sample='down',
                         activation=F.leaky_relu, dropout=False)
    layers['c1'] = CBR(32 * channel_expansion, 128, bn=True, sample='down',
                       activation=F.leaky_relu, dropout=False)
    layers['c2'] = CBR(128, 256, bn=True, sample='down',
                       activation=F.leaky_relu, dropout=False)
    layers['c3'] = CBR(256, 512, bn=True, sample='down',
                       activation=F.leaky_relu, dropout=False)
    layers['c4'] = L.Convolution2D(512, 1, 3, 1, 1, initialW=w)
    super(Discriminator, self).__init__(**layers)
def __call__(self, x):
    """Three-layer MLP: two leaky-ReLU hidden layers, linear output."""
    hidden = F.leaky_relu(self.l1(x))
    hidden = F.leaky_relu(self.l2(hidden))
    return self.l3(hidden)
def __call__(self, x, train=True):
    """Discriminator forward pass returning the mean patch score.

    Args:
        x: Input batch.
        train: When False, batch-norm links run in test mode.

    Returns:
        Scalar: sum of the final layer's output divided by its size.
    """
    test_mode = not train
    h = F.leaky_relu(self.c0_0(x))
    for conv, norm in ((self.c0_1, self.bn0_1), (self.c1_1, self.bn1_1),
                       (self.c2_1, self.bn2_1), (self.c3_0, self.bn3_0)):
        h = F.leaky_relu(norm(conv(h), test=test_mode))
    score = self.l4(h)
    return F.sum(score) / score.size
def __call__(self, x, train=True):
    """Noisy discriminator forward pass returning the mean patch score.

    Args:
        x: Input batch.
        train: When False, both noise injection and batch norm run in
            test mode.

    Returns:
        Scalar: sum of the final layer's output divided by its size.
    """
    test_mode = not train
    h = add_noise(x, test=test_mode)
    h = F.leaky_relu(add_noise(self.c0_0(h), test=test_mode))
    for conv, norm in ((self.c0_1, self.bn0_1), (self.c1_1, self.bn1_1),
                       (self.c2_1, self.bn2_1), (self.c3_0, self.bn3_0)):
        normed = norm(conv(h), test=test_mode)
        h = F.leaky_relu(add_noise(normed, test=test_mode))
    score = self.l4(h)
    return F.sum(score) / score.size
def __call__(self, x, train=True):
    """Deep noisy discriminator (seven conv stages) returning a mean score.

    Args:
        x: Input batch.
        train: When False, noise and batch norm run in test mode.

    Returns:
        Scalar: sum of the final layer's output divided by its size.
    """
    test_mode = not train
    h = add_noise(x, test=test_mode)
    h = F.leaky_relu(add_noise(self.c0_0(h), test=test_mode))
    stages = ((self.c0_1, self.bn0_1), (self.c1_0, self.bn1_0),
              (self.c1_1, self.bn1_1), (self.c2_0, self.bn2_0),
              (self.c2_1, self.bn2_1), (self.c3_0, self.bn3_0))
    for conv, norm in stages:
        normed = norm(conv(h), test=test_mode)
        h = F.leaky_relu(add_noise(normed, test=test_mode))
    score = self.l4(h)
    return F.sum(score) / score.size
def __call__(self, x, train=True):
    """Plain discriminator (no batch norm, no noise) returning a mean score.

    Args:
        x: Input batch.
        train: Unused here; kept for signature compatibility with the
            batch-norm variants.

    Returns:
        Scalar: sum of the final layer's output divided by its size.
    """
    h = x
    for conv in (self.c0_0, self.c0_1, self.c1_0, self.c1_1,
                 self.c2_0, self.c2_1, self.c3_0):
        h = F.leaky_relu(conv(h))
    score = self.l4(h)
    return F.sum(score) / score.size
def __call__(self, x, train=True):
    """Encode ``x`` into the parameters of a Gaussian posterior.

    Args:
        x: Input batch.
        train: When False, the norm links run in test mode.

    Returns:
        Tuple ``(mean, ln_var)`` from the two output heads.
    """
    test_mode = not train
    h = F.leaky_relu(self.enc1(x))
    for enc, norm in ((self.enc2, self.norm2), (self.enc3, self.norm3),
                      (self.enc4, self.norm4)):
        h = F.leaky_relu(norm(enc(h), test=test_mode))
    return self.mean(h), self.ln_var(h)
def __call__(self, x, train=True):
    """Noisy discriminator that also exposes two intermediate features.

    Args:
        x: Input batch.
        train: When False, noise and batch norm run in test mode.

    Returns:
        Tuple of (mean score, stage-2 feature map, stage-3 feature map) —
        the features are used for feature matching by the caller.
    """
    test_mode = not train

    def noisy(value):
        # Shorthand: inject noise with the shared test flag.
        return add_noise(value, test=test_mode)

    h = noisy(x)
    h = F.leaky_relu(noisy(self.c0_0(h)))
    h = F.leaky_relu(noisy(self.bn0_1(self.c0_1(h), test=test_mode)))
    h = F.leaky_relu(noisy(self.bn1_1(self.c1_1(h), test=test_mode)))
    feat2 = F.leaky_relu(noisy(self.bn2_1(self.c2_1(h), test=test_mode)))
    feat3 = F.leaky_relu(noisy(self.bn3_0(self.c3_0(feat2), test=test_mode)))
    score = self.l4(feat3)
    return F.sum(score) / score.size, feat2, feat3
def __call__(self, x, train=True):
    """Discriminator that also exposes two intermediate features.

    Args:
        x: Input batch.
        train: When False, batch norm runs in test mode.

    Returns:
        Tuple of (mean score, stage-2 feature map, stage-3 feature map).
    """
    test_mode = not train
    h = F.leaky_relu(self.c0_0(x))
    h = F.leaky_relu(self.bn0_1(self.c0_1(h), test=test_mode))
    h = F.leaky_relu(self.bn1_1(self.c1_1(h), test=test_mode))
    feat2 = F.leaky_relu(self.bn2_1(self.c2_1(h), test=test_mode))
    feat3 = F.leaky_relu(self.bn3_0(self.c3_0(feat2), test=test_mode))
    score = self.l4(feat3)
    return F.sum(score) / score.size, feat2, feat3
def __call__(self, x, train=True):
    """Convolutional encoder producing Gaussian posterior parameters.

    Args:
        x: Input batch.
        train: When False, batch norm runs in test mode.

    Returns:
        Tuple ``(mean, ln_var)`` from the two output heads.
    """
    test_mode = not train
    h = F.leaky_relu(self.c0_0(x))
    for conv, norm in ((self.c0_1, self.bn0_1), (self.c1_1, self.bn1_1),
                       (self.c2_1, self.bn2_1), (self.c3_0, self.bn3_0)):
        h = F.leaky_relu(norm(conv(h), test=test_mode))
    return self.mean(h), self.ln_var(h)