import numpy as np
import matplotlib.pyplot as plt


def plot_learning_curve(estimator, title, X, y,
                        ylim=None, cv=None, scoring=None,
                        n_jobs=1, train_sizes=np.linspace(0.1, 1.0, 5)):
    """
    Generate a simple plot of the test and training learning curve.

    Parameters
    ----------
    estimator : object type that implements the "fit" and "predict" methods.
    title : string; title for the chart.
    X : training vector, shape (n_samples, n_features)
    y : target, shape (n_samples,)
    ylim : tuple, shape (ymin, ymax)
        Defines minimum and maximum y-values plotted.
    cv : int, cross-validation generator or an iterable
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:
        - None, to use the default 3-fold cross-validation
        - Integer, to specify the number of folds
        - An object to be used as a cross-validation generator
    """
    from sklearn.model_selection import learning_curve
    plt.figure()
    plt.title(title)
    if ylim is not None:
        plt.ylim(*ylim)
    plt.xlabel("Training examples")
    plt.ylabel("Score")
    train_sizes, train_scores, test_scores = learning_curve(
        estimator, X, y, cv=cv, n_jobs=n_jobs,
        train_sizes=train_sizes, scoring=scoring)
    train_scores_mean = np.mean(train_scores, axis=1)
    train_scores_std = np.std(train_scores, axis=1)
    test_scores_mean = np.mean(test_scores, axis=1)
    test_scores_std = np.std(test_scores, axis=1)
    plt.grid()
    # Shaded bands show +/- one standard deviation across the CV folds.
    plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
                     train_scores_mean + train_scores_std, alpha=0.1,
                     color='r')
    plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
                     test_scores_mean + test_scores_std, alpha=0.1,
                     color='g')
    plt.plot(train_sizes, train_scores_mean, 'o-', color='r',
             label="Training score")
    plt.plot(train_sizes, test_scores_mean, 'o-', color='g',
             label="Cross-validation score")
    plt.legend(loc="best")
    return plt, train_sizes
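A minimal usage sketch, assuming scikit-learn is installed; the data, the RandomForestRegressor, and the variable names X_train / y_train are placeholders standing in for whatever power_prediction.py actually uses:

from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import ShuffleSplit

# Hypothetical data; replace with the real feature matrix and target.
rng = np.random.RandomState(0)
X_train = rng.rand(200, 5)
y_train = X_train @ np.array([1.0, 2.0, 0.5, 0.0, -1.0]) + rng.normal(scale=0.1, size=200)

# Explicit CV splitter instead of the default; any scikit-learn splitter works.
cv = ShuffleSplit(n_splits=5, test_size=0.2, random_state=0)
plt_obj, sizes = plot_learning_curve(
    RandomForestRegressor(n_estimators=50, random_state=0),
    "Learning curve (RandomForestRegressor)",
    X_train, y_train, ylim=(0.0, 1.01),
    cv=cv, scoring="r2", n_jobs=1)
plt_obj.show()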
Source file: power_prediction.py