# Helper method of the test case: checks the analytically computed gradients
# of GraphConvolutionFunction against numerical gradients. Assumes
# module-level imports of chainer.cuda, chainer.gradient_check, and the
# graph_convolution module under test.
def check_backward(self, x_data, W_data, b_data, y_grad, use_gpu=False):
    xp = cuda.get_array_module(x_data)  # numpy or cupy, matching the inputs
    if not self.c_contiguous:
        # Exercise the non-contiguous code path: Fortran order breaks
        # C-contiguity for the multi-dimensional arrays.
        x_data = xp.asfortranarray(x_data)
        W_data = xp.asfortranarray(W_data)
        y_grad = xp.asfortranarray(y_grad)
        self.assertFalse(x_data.flags.c_contiguous)
        self.assertFalse(W_data.flags.c_contiguous)
        self.assertFalse(y_grad.flags.c_contiguous)
        if b_data is not None:
            # A 1-D array stays C-contiguous even in Fortran order, so make
            # the bias non-contiguous via a strided view into a larger buffer.
            b = xp.empty((len(b_data) * 2,), dtype=self.b.dtype)
            b[::2] = b_data
            b_data = b[::2]
            self.assertFalse(b_data.flags.c_contiguous)
    func = graph_convolution.GraphConvolutionFunction(self.L, self.K)
    if use_gpu:
        func.to_gpu()
    args = (x_data, W_data)
    if b_data is not None:
        args = args + (b_data,)
    gradient_check.check_backward(
        func, args, y_grad,
        **self.check_backward_options)
Source: test_graph_convolution.py
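For context, here is a minimal sketch of how a test case presumably drives this helper. The fixture shapes, the placeholder Laplacian, the tolerance values, and the test method names are assumptions for illustration; only the check_backward helper itself comes from the snippet above, and the real suite likely parameterizes c_contiguous and other settings.

import unittest

import numpy as np
from chainer import cuda


class TestGraphConvolution(unittest.TestCase):

    c_contiguous = True  # assumed; the real suite likely varies this flag

    def setUp(self):
        # Hypothetical fixture: a batch of signals over n_verts graph nodes.
        n_verts, in_ch, out_ch, batch = 4, 3, 2, 5
        self.K = 2                                  # Chebyshev filter order
        self.L = np.eye(n_verts, dtype=np.float32)  # placeholder Laplacian
        self.x = np.random.randn(batch, in_ch, n_verts).astype(np.float32)
        self.W = np.random.randn(out_ch, in_ch, self.K).astype(np.float32)
        self.b = np.random.randn(out_ch).astype(np.float32)
        self.gy = np.random.randn(batch, out_ch, n_verts).astype(np.float32)
        self.check_backward_options = {'atol': 1e-3, 'rtol': 1e-3}

    # check_backward(...) from the snippet above is defined here.

    def test_backward_cpu(self):
        self.check_backward(self.x, self.W, self.b, self.gy)

    @unittest.skipUnless(cuda.available, 'CUDA is required')
    def test_backward_gpu(self):
        self.check_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.W),
            cuda.to_gpu(self.b), cuda.to_gpu(self.gy), use_gpu=True)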