def check_backward(self, x_data, W_data, b_data, y_grad):
    xp = cuda.get_array_module(x_data)
    if not self.c_contiguous:
        # Convert the inputs to Fortran (column-major) order so the
        # backward pass is also exercised on non-C-contiguous arrays.
        x_data = xp.asfortranarray(x_data)
        W_data = xp.asfortranarray(W_data)
        y_grad = xp.asfortranarray(y_grad)
        self.assertFalse(x_data.flags.c_contiguous)
        self.assertFalse(W_data.flags.c_contiguous)
        self.assertFalse(y_grad.flags.c_contiguous)
        if b_data is not None:
            # A 1-D array stays contiguous under asfortranarray, so the
            # bias is made non-contiguous via a strided view instead:
            # write it into every other slot of a double-length buffer
            # and keep the step-2 slice.
            b = xp.empty((len(b_data) * 2,), dtype=self.b.dtype)
            b[::2] = b_data
            b_data = b[::2]
            self.assertFalse(b_data.flags.c_contiguous)

    args = (x_data, W_data)
    if b_data is not None:
        args = args + (b_data,)

    gradient_check.check_backward(
        convolution_2d.Convolution2DFunction(
            self.stride, self.pad, self.use_cudnn, self.cover_all),
        args, y_grad, eps=1e-2)
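
The subtle point in this test is how it fabricates non-C-contiguous inputs, and why the bias needs a different trick than the other arrays. Below is a minimal standalone sketch of both tricks using plain NumPy (variable names are mine, not from the test):

import numpy as np

x = np.arange(24, dtype=np.float32).reshape(2, 3, 2, 2)

# Trick 1: Fortran (column-major) layout makes a multi-dimensional
# array non-C-contiguous without changing its values.
x_f = np.asfortranarray(x)
assert not x_f.flags.c_contiguous
assert np.array_equal(x, x_f)

# Trick 2: a 1-D array is both C- and Fortran-contiguous, so
# asfortranarray would be a no-op for the bias. Writing the values
# into every other slot of a double-length buffer and taking the
# step-2 view breaks contiguity while preserving the values.
b = np.arange(3, dtype=np.float32)
buf = np.empty((len(b) * 2,), dtype=b.dtype)
buf[::2] = b
b_view = buf[::2]
assert not b_view.flags.c_contiguous
assert np.array_equal(b, b_view)

Running gradient_check.check_backward on such views catches kernels that implicitly assume C-contiguous input buffers.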