def check_gradient(self, X, T, eps=1e-10):
    """Compare the numerical gradient with the analytical gradient and
    return True when their relative difference is below eps."""
    thetas = self.flatten()
    # Numerical estimate vs. analytical gradient returned alongside the cost.
    grad1 = self.numerical_gradient(thetas, X, T)
    _, grad2 = self.compute_cost(thetas, X, T)
    # Relative difference between the two gradient vectors.
    diff = linalg.norm(grad1 - grad2) / linalg.norm(grad1 + grad2)
    # Print both gradients and their element-wise absolute difference side by side.
    print(np.c_[grad1, grad2, np.abs(grad1 - grad2)])
    print('diff = {0}'.format(diff))
    return diff < eps
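
For context, self.numerical_gradient is presumably a finite-difference estimate of the gradient. Below is a minimal standalone sketch of the same gradient-check technique, using central differences and the same relative-difference criterion; the helper name numerical_gradient, the toy quadratic cost, and the step size 1e-4 are all illustrative assumptions, not part of the original class.

import numpy as np

def numerical_gradient(cost_fn, thetas, step=1e-4):
    # Central-difference estimate: (J(theta + h) - J(theta - h)) / (2h), one component at a time.
    grad = np.zeros_like(thetas)
    for i in range(thetas.size):
        h = np.zeros_like(thetas)
        h[i] = step
        grad[i] = (cost_fn(thetas + h) - cost_fn(thetas - h)) / (2 * step)
    return grad

# Toy quadratic cost J(theta) = 0.5 * ||theta||^2, whose analytical gradient is theta itself.
cost_fn = lambda t: 0.5 * np.dot(t, t)
thetas = np.random.randn(5)
grad_numeric = numerical_gradient(cost_fn, thetas)
grad_analytic = thetas

# Same relative-difference criterion as check_gradient above; a tiny value means the gradients agree.
diff = np.linalg.norm(grad_numeric - grad_analytic) / np.linalg.norm(grad_numeric + grad_analytic)
print('diff = {0}'.format(diff))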