import numpy as np


def finite_diff_gradients(self, f, delta=1e-6):
    """
    Estimate gradients by forward finite differences: (f(x + delta) - f(x)) / delta.

    f is called with no arguments; it reads the current parameter values, so
    perturbing a parameter via set_value changes its result as a side effect.
    """
    gradients = dict()
    # Baseline objective value at the current parameter settings.
    fx = f()
    for p in self.parameters_to_optimise:
        original = self.parameters[p].get_value()
        grad = np.zeros_like(original)
        if np.prod(original.shape) > 1:
            # Multi-element parameter: perturb one entry at a time.
            for index, _ in np.ndenumerate(original):
                xh = original.copy()
                xh[index] += delta
                self.parameters[p].set_value(xh)
                grad[index] = (f() - fx) / delta
            # Restore the unperturbed value before moving to the next parameter.
            self.parameters[p].set_value(original)
        else:
            # Scalar (or single-element) parameter: a single perturbation suffices.
            xh = original.copy()
            xh += delta
            self.parameters[p].set_value(xh)
            grad = (f() - fx) / delta
            self.parameters[p].set_value(original)
        gradients[p] = grad
    return gradients
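
For reference, here is a minimal usage sketch. The Parameter and QuadraticModel classes below are hypothetical stand-ins for whatever objects expose get_value/set_value, parameters, and parameters_to_optimise in the real code, and the sketch assumes finite_diff_gradients is available at module level so it can be attached to the toy class; it only illustrates how the method could be called and checked against known gradients.

import numpy as np


class Parameter:
    """Tiny value holder exposing the get_value/set_value interface used above."""
    def __init__(self, value):
        self._value = np.asarray(value, dtype=float)

    def get_value(self):
        return self._value

    def set_value(self, value):
        self._value = np.asarray(value, dtype=float)


class QuadraticModel:
    """Toy model with loss 0.5 * ||w||^2 + b, so the exact gradients are w and 1."""
    def __init__(self):
        self.parameters = {
            "w": Parameter(np.array([1.0, -2.0, 3.0])),
            "b": Parameter(np.array(0.5)),
        }
        self.parameters_to_optimise = ["w", "b"]

    def loss(self):
        w = self.parameters["w"].get_value()
        b = self.parameters["b"].get_value()
        return 0.5 * float(np.sum(w ** 2)) + float(b)


# Attach the method defined above so it can be called as model.finite_diff_gradients(...).
QuadraticModel.finite_diff_gradients = finite_diff_gradients

model = QuadraticModel()
grads = model.finite_diff_gradients(model.loss, delta=1e-6)
print(grads["w"])  # approximately [1.0, -2.0, 3.0]
print(grads["b"])  # approximately 1.0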