def param_grad(self, k_param):
"""
Returns gradient over hyperparameters. It is recommended to use `self._grad` instead.
Parameters
----------
k_param: dict
Dictionary with keys being hyperparameters and values their queried values.
Returns
-------
np.ndarray
Gradient corresponding to each hyperparameters. Order given by `k_param.keys()`
"""
    k_param_key = list(k_param.keys())
    # Rebuild the covariance function at the queried hyperparameter values.
    covfunc = self.covfunc.__class__(**k_param)
    # Covariance matrix over the training inputs and its Cholesky factor.
    K = covfunc.K(self.X, self.X)
    L = cholesky(K).T
    # alpha = K^{-1} y, computed via two triangular solves.
    alpha = solve(L.T, solve(L, self.y))
    # Shared factor of the marginal-likelihood gradient: alpha alpha^T - K^{-1}.
    inner = np.dot(np.atleast_2d(alpha).T, np.atleast_2d(alpha)) - np.linalg.inv(K)
    grads = []
    for param in k_param_key:
        # d(log p)/d(theta) = 0.5 * tr((alpha alpha^T - K^{-1}) dK/dtheta)
        gradK = covfunc.gradK(self.X, self.X, param=param)
        gradK = .5 * np.trace(np.dot(inner, gradK))
        grads.append(gradK)
    return np.array(grads)
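
# A minimal usage sketch, assuming pyGPGO's squaredExponential kernel and
# GaussianProcess surrogate; the import paths and the hyperparameter names
# 'l' / 'sigmaf' follow that library and are not part of the excerpt above.
# The method itself relies on the surrounding module importing `numpy as np`
# and `cholesky` / `solve` from `scipy.linalg`.
import numpy as np
from pyGPGO.covfunc import squaredExponential
from pyGPGO.surrogates.GaussianProcess import GaussianProcess

rng = np.random.RandomState(0)
X = rng.uniform(-3, 3, size=(20, 1))           # training inputs
y = np.sin(X).ravel() + 0.1 * rng.randn(20)    # noisy targets

gp = GaussianProcess(squaredExponential())
gp.fit(X, y)                                   # stores self.X and self.y used by param_grad

# Gradient of the log marginal likelihood at the queried hyperparameter values,
# one entry per key, ordered as in the dict.
print(gp.param_grad({'l': 1.0, 'sigmaf': 1.0}))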