test_autograd.py source code


Project: pytorch    Author: tylergenter
def test_accumulate_grad(self):
        # Exercises gradient accumulation under the legacy (pre-0.4) autograd
        # API: Variable wrappers, volatile gradients passed as raw tensors,
        # and the retain_variables= keyword (later renamed retain_graph=).
        # Relies on the file-level imports of test_autograd.py: torch,
        # torch.autograd.Variable, and itertools.product.
        grad_output = Variable(torch.ones(5, 5))
        for start_volatile, end_volatile in product((True, False), repeat=2):
            # Passing .data (a plain tensor) stands in for a volatile
            # gradient; passing the Variable itself keeps it non-volatile.
            go1 = grad_output.data if start_volatile else grad_output
            go2 = grad_output.data if end_volatile else grad_output

            x = Variable(torch.randn(5, 5), requires_grad=True)
            y = x + 2
            # First backward pass; keep the graph alive for a second pass.
            y.backward(go1, retain_variables=True)
            x_grad = x.grad
            x_grad_clone = x.grad.data.clone()

            # Drop the last reference to x and backprop again; the second
            # gradient must still reach the saved x_grad Variable.
            del x
            y.backward(go2)

            # That's the only case when we can accumulate in-place
            if start_volatile and end_volatile:
                expected_grad = x_grad_clone * 2
            else:
                expected_grad = x_grad_clone
            self.assertEqual(x_grad.data, expected_grad)
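
For reference: the volatile flag and the retain_variables= keyword were removed in PyTorch 0.4, where Variable merged into Tensor. A minimal standalone sketch of the same accumulation check on the modern API (assuming PyTorch >= 0.4; this is not part of the project's test file) could look like:

import torch

# Sketch of the accumulation behavior on the post-0.4 API: plain tensors
# replace Variable, and retain_graph= replaces retain_variables=.
grad_output = torch.ones(5, 5)

x = torch.randn(5, 5, requires_grad=True)
y = x + 2
y.backward(grad_output, retain_graph=True)  # first pass populates x.grad
first_grad = x.grad.clone()

y.backward(grad_output)  # second pass accumulates into the same x.grad
assert torch.equal(x.grad, first_grad * 2)

Since volatile no longer exists, the four-way branching in the test above collapses to this single case: both passes accumulate into the same .grad tensor.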