from chainer import cuda


def _backward_sum(gy, in_shape, sum_axis=(1, 2), keepdims=True):
    """Broadcast the gradient of ``sum()`` back to the input shape."""
    xp = cuda.get_array_module(gy)
    if not (len(in_shape) == 0 or sum_axis is None or keepdims):
        # The reduced axes were dropped from ``gy``; re-insert them as
        # size-1 dimensions so that broadcasting aligns correctly.
        actual_axis = []
        for axis in sum_axis:
            if axis < 0:
                axis += len(in_shape)  # normalize negative axes
            actual_axis.append(axis)
        for axis in sorted(actual_axis):
            gy = xp.expand_dims(gy, axis=axis)
    if hasattr(xp, 'broadcast_to'):
        gx = xp.broadcast_to(gy, in_shape)
    else:
        # NumPy 1.9 does not support broadcast_to; emulate it by
        # broadcasting ``gy`` against a dummy array of the target shape.
        dummy_x = xp.empty(in_shape, 'b')
        gx, _ = xp.broadcast_arrays(gy, dummy_x)
    return gx
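
As a quick sanity check, here is a minimal usage sketch (assuming a NumPy-only environment and the sum_axis/keepdims keyword parameters defined above): the returned gradient always has the shape of the forward input.

import numpy as np

x = np.random.rand(2, 3, 4)            # forward input
y = x.sum(axis=(1, 2), keepdims=True)  # forward result, shape (2, 1, 1)
gx = _backward_sum(np.ones_like(y), x.shape)
assert gx.shape == (2, 3, 4)           # gy broadcast back to x's shape

# Without keepdims, the reduced axes are re-inserted before broadcasting.
y2 = x.sum(axis=(1, 2))                # shape (2,)
gx2 = _backward_sum(np.ones_like(y2), x.shape, keepdims=False)
assert gx2.shape == (2, 3, 4)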