def cv_LinearRegression_ci_pred_full_Ridge( xM, yV, alpha, n_splits = 5, shuffle=True, disp = False):
    """
    Run KFold cross-validated Ridge regression and collect per-sample errors,
    per-fold model parameters, and out-of-fold predictions.

    Parameters
    ----------
    xM : array-like, shape (n_samples, n_features)
        Feature matrix; indexed as xM[rows, :] so it must support 2-D indexing.
    yV : np.matrix, shape (n_samples, 1)
        Target column vector. A 2-D ndarray column also works; plain 1-D
        targets are not supported because of the fit/assignment semantics
        below. (NOTE(review): original used `.A1`, which required np.matrix.)
    alpha : float
        Ridge regularization strength.
    n_splits : int, default 5
        Number of KFold splits.
    shuffle : bool, default True
        Whether KFold shuffles sample indices before splitting.
    disp : bool, default False
        If True, print the input shapes.

    Returns
    -------
    cv_score_l : list of float
        Absolute error |y - y_pred| for every sample, in fold visitation order.
    ci_l : list of (coef_, intercept_)
        Fitted coefficients and intercept for each fold.
    yVp_l : list of float
        Out-of-fold predictions aligned with the original sample order.

    Note
    ----
    No scoring argument is accepted; an earlier version kept one only for
    API compatibility and it was never used.
    """
    if disp:
        print(xM.shape, yV.shape)
    # Keyword form avoids depending on Ridge's positional parameter order.
    clf = linear_model.Ridge(alpha=alpha)
    kf5_c = model_selection.KFold(n_splits=n_splits, shuffle=shuffle)
    kf5 = kf5_c.split(xM)
    cv_score_l = list()
    ci_l = list()
    # Copy so every test-fold slot is overwritten with its out-of-fold
    # prediction; KFold covers all indices exactly once.
    yVp = yV.copy()
    for train, test in kf5:
        # yV is a column (n, 1) target here; fit on the 2-D slice so the
        # stored coef_/intercept_ shapes match the original behavior.
        clf.fit(xM[train, :], yV[train])
        yVp_test = clf.predict(xM[test, :])
        yVp[test] = yVp_test
        # Coefficients and intercept are stored per fold.
        ci_l.append((clf.coef_, clf.intercept_))
        # ravel() flattens the (len, 1) column to 1-D; equivalent to the old
        # `np.array(...)[:, 0]` for column input but robust to ndarray targets.
        y_a = np.asarray(yV[test]).ravel()
        yp_a = np.asarray(yVp_test).ravel()
        cv_score_l.extend(np.abs(y_a - yp_a).tolist())
    # asarray(...).ravel() equals matrix .A1 but also accepts plain ndarrays.
    return cv_score_l, ci_l, np.asarray(yVp).ravel().tolist()
# (removed: stray blog-scrape footer text — "comment list" / "article table of
#  contents" — which was not code and made the module unparseable)