def fgrad(we, X, y, l1, l2):
    """Return the elastic-net logistic-regression objective and its gradient.

    The weight vector is split into non-negative halves so a box-constrained
    optimizer (e.g. L-BFGS-B with lower bound 0) can handle the L1 term:
    ``we = [w0, w_plus (nfactors), w_minus (nfactors)]`` with
    ``w = w_plus - w_minus``; under the constraints ``sum(we[1:])`` equals
    ``||w||_1``.

    Parameters
    ----------
    we : ndarray of shape (2 * nfactors + 1,)
        Intercept followed by the positive and negative weight parts.
    X : array-like or sparse matrix of shape (nsamples, nfactors)
        Feature matrix.
    y : ndarray of shape (nsamples,)
        Labels, presumably in {-1, +1} (the ``y * z`` margin form) — confirm
        against the caller.
    l1, l2 : float
        L1 and L2 regularization strengths.

    Returns
    -------
    (f, grad)
        Objective value and gradient with respect to ``we``.
    """
    _, nfactors = X.shape
    intercept = we[0]
    # Recombine the split parameterization into the signed weight vector.
    weights = we[1:nfactors + 1] - we[nfactors + 1:]
    margins = y * (safe_sparse_dot(X, weights) + intercept)

    # Negative log-likelihood + L1 on the split parts + L2 on the signed w.
    f = - np.sum(log_logistic(margins)) + l1 * np.sum(we[1:]) \
        + 0.5 * l2 * np.dot(weights, weights)

    # d(loss)/d(margin) folded with the label; expit(z) - 1 = -sigmoid(-z).
    residual = (expit(margins) - 1) * y
    partial = safe_sparse_dot(X.T, residual) + l2 * weights

    # Gradient w.r.t. w_plus is +partial, w.r.t. w_minus is -partial; the L1
    # strength adds uniformly to both halves. The intercept carries no penalty.
    grad = np.insert(np.concatenate([partial, -partial]) + l1, 0,
                     np.sum(residual))
    return f, grad