def grad(self, inp, cost_grad):
    """Gradient of fill_diagonal_offset w.r.t. its three inputs.

    Returns a list with one gradient term per input
    ``(a, val, offset)``.

    Notes
    -----
    The gradient is currently implemented for matrices only.
    """
    a, val, offset = inp
    grad = cost_grad[0]
    height, width = grad.shape

    if a.dtype.startswith('complex'):
        # BUG FIX: one gradient entry is required per input
        # (a, val, offset); the original returned only two, a
        # length mismatch that would error if this branch ran.
        return [None, None, None]

    # only valid for matrices
    # Gradient w.r.t. `a`: the incoming gradient with the written
    # diagonal zeroed out (those positions were overwritten by val).
    wr_a = fill_diagonal_offset(grad, 0, offset)

    offset_abs = basic.abs_(offset)
    pos_offset_flag = basic.ge(offset, 0)
    neg_offset_flag = basic.lt(offset, 0)
    min_wh = basic.minimum(width, height)

    # Flat (row-major) index of the first element of the offset
    # diagonal, and the number of elements that diagonal holds.
    start = offset * pos_offset_flag + offset_abs * width * neg_offset_flag
    num_of_step = basic.minimum(min_wh, width * pos_offset_flag +
                                height * neg_offset_flag - offset_abs)
    # Stepping width+1 through the flattened matrix walks a diagonal.
    step = a.shape[1] + 1
    end = start + step * num_of_step

    # input of slice should be integer
    start = basic.cast(start, 'int32')
    step = basic.cast(step, 'int32')
    end = basic.cast(end, 'int32')

    # Gradient w.r.t. `val`: sum of the incoming gradient over the
    # diagonal positions that val was written into.
    wr_val = grad.flatten()[start:end:step].sum()

    # `offset` is integer-valued, so its gradient is undefined.
    wr_offset = theano.gradient.grad_undefined(
        self, 2, offset,
        "offset is not defined for non-integer offset so"
        " fill_diagonal_offset(a,val,offset+eps) is undefined")

    return [wr_a, wr_val, wr_offset]