updater.py source code


Project: chainer-gan-experiments    Author: Aixile
def update_core(self):
        xp = self.gen.xp
        self._iter += 1

        opt_g = self.get_optimizer('gen')
        opt_d = self.get_optimizer('dis')

        # Sample a latent-code batch for the generator and a real-image batch
        # for the discriminator.
        data_z = self.get_latent_code_batch()
        data_x = self.get_real_image_batch()

        x_fake = self.gen(Variable(data_z))
        dis_fake = self.dis(x_fake)

        # Generator step: push the discriminator to classify the fakes as real.
        loss_gen = loss_func_dcgan_dis_real(dis_fake)
        chainer.report({'loss': loss_gen}, self.gen)
        opt_g.zero_grads()
        loss_gen.backward()
        opt_g.update()

        # Cut the graph so the discriminator step does not backpropagate
        # into the generator.
        x_fake.unchain_backward()

        # Perturb the real batch with uniform noise scaled by the per-pixel
        # batch standard deviation (DRAGAN-style points near the data manifold).
        std_data_x = xp.std(data_x, axis=0, keepdims=True)
        rnd_x = xp.random.uniform(0, 1, data_x.shape).astype("f")
        x_perturbed = Variable(data_x + 0.5*rnd_x*std_data_x)

        x_real = Variable(data_x)
        dis_real = self.dis(x_real)
        dis_perturbed = self.dis(x_perturbed, retain_forward=True)

        # Gradient penalty: backpropagate ones through the discriminator
        # (project-specific differentiable_backward) to obtain dD/dx at the
        # perturbed points, then drive its L2 norm towards 1.
        g = Variable(xp.ones_like(dis_perturbed.data))
        grad = self.dis.differentiable_backward(g)
        grad_l2 = F.sqrt(F.sum(grad**2, axis=(1, 2, 3)))
        loss_gp = self._lambda_gp * loss_l2(grad_l2, 1.0)

        # Discriminator step: real/fake DCGAN losses plus the gradient penalty.
        loss_dis = loss_func_dcgan_dis_real(dis_real) + \
                    loss_func_dcgan_dis_fake(dis_fake) + \
                    loss_gp

        opt_d.zero_grads()
        loss_dis.backward()
        opt_d.update()

        chainer.report({'loss': loss_dis, 'loss_gp': loss_gp}, self.dis)
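
The update step above is a DRAGAN-style GAN updater: the generator is optimized first against the DCGAN "real" loss on its fake batch, then the discriminator is optimized on real and fake samples plus a gradient penalty that pushes the L2 norm of dD/dx, evaluated at noise-perturbed real samples, towards 1 (weighted by self._lambda_gp). The helpers loss_func_dcgan_dis_real, loss_func_dcgan_dis_fake and loss_l2 are defined elsewhere in the repository and are not shown here; the following is a minimal sketch of how such helpers could look, assuming the usual softplus form of the DCGAN objective and a mean-squared penalty. It is not the project's actual code.

import numpy as np
import chainer.functions as F


def loss_func_dcgan_dis_real(y):
    # -log(sigmoid(y)) averaged over all elements: "this sample is real".
    return F.sum(F.softplus(-y)) / np.prod(y.data.shape)


def loss_func_dcgan_dis_fake(y):
    # -log(1 - sigmoid(y)) averaged over all elements: "this sample is fake".
    return F.sum(F.softplus(y)) / np.prod(y.data.shape)


def loss_l2(h, t):
    # Mean squared distance between h and a (broadcastable) target t.
    return F.sum((h - t) ** 2) / np.prod(h.data.shape)

With definitions along these lines, loss_gp equals self._lambda_gp times the batch mean of (||dD/dx||_2 - 1)^2, the standard one-centred gradient-penalty term.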