def grad(self, inp, cost_grad):
    """
    Notes
    -----
    The gradient is currently implemented for matrices only.
    """
    a, val = inp
    grad = cost_grad[0]
    if a.dtype.startswith('complex'):
        # Gradients are not defined for complex inputs.
        return [None, None]
    elif a.ndim > 2:
        raise NotImplementedError('%s: gradient is currently implemented'
                                  ' for matrices only' %
                                  self.__class__.__name__)
    # The upstream gradient flows through every entry of `a` except the
    # diagonal, which the forward pass overwrote with `val`.
    wr_a = fill_diagonal(grad, 0)  # valid for any number of dimensions
    # `val` was written to every diagonal entry, so its gradient is the
    # sum over the diagonal; diag is only valid for matrices.
    wr_val = theano.tensor.nlinalg.diag(grad).sum()
    return [wr_a, wr_val]
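
As a quick sanity check, one can differentiate through fill_diagonal and confirm the two gradients this method computes. Below is a minimal sketch, assuming fill_diagonal is importable from theano.tensor.extra_ops (where the FillDiagonal op lives in recent Theano releases):

import numpy as np
import theano
import theano.tensor as tt
from theano.tensor.extra_ops import fill_diagonal

# Symbolic inputs: a matrix and the scalar written onto its diagonal.
a = tt.dmatrix('a')
val = tt.dscalar('val')

# A scalar cost, so theano.grad routes through FillDiagonal.grad above.
cost = fill_diagonal(a, val).sum()
wr_a, wr_val = theano.grad(cost, [a, val])

f = theano.function([a, val], [wr_a, wr_val])
grad_a, grad_val = f(np.ones((3, 3)), 2.0)
# grad_a: a matrix of ones with a zeroed diagonal, since the diagonal
#         of `a` is overwritten and receives no gradient.
# grad_val: 3.0, the upstream gradient summed over the three diagonal
#           entries that `val` was broadcast onto.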