import numpy as np
import xgboost as xgb
from hyperopt import STATUS_OK
from sklearn.model_selection import StratifiedKFold


# Method of the tuning class in tuning.py; expects self.train_X and self.train_y to be set.
def score(self, params):
    print("Training with params:")
    print(params)
    N_boost_round = []
    Score = []
    # Stratified 6-fold CV; sklearn's model_selection API replaces the removed cross_validation module.
    skf = StratifiedKFold(n_splits=6, shuffle=True, random_state=25)
    for train, test in skf.split(self.train_X, self.train_y):
        X_Train, X_Test = self.train_X[train], self.train_X[test]
        y_Train, y_Test = self.train_y[train], self.train_y[test]
        dtrain = xgb.DMatrix(X_Train, label=y_Train)
        dvalid = xgb.DMatrix(X_Test, label=y_Test)
        watchlist = [(dtrain, 'train'), (dvalid, 'eval')]
        # Train with early stopping against the held-out fold.
        model = xgb.train(params, dtrain, num_boost_round=150,
                          evals=watchlist, early_stopping_rounds=10)
        predictions = model.predict(dvalid)  # not used further; kept from the original
        N_boost_round.append(model.best_iteration)
        Score.append(model.best_score)
    Average_best_num_boost_round = np.average(N_boost_round)
    Average_best_score = np.average(Score)
    print("\tAverage of best iteration {0}\n".format(Average_best_num_boost_round))
    print("\tScore {0}\n\n".format(Average_best_score))
    # hyperopt minimizes 'loss', so the eval metric in params should be one to minimize (e.g. logloss).
    return {'loss': Average_best_score, 'status': STATUS_OK,
            'Average_best_num_boost_round': Average_best_num_boost_round}
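This score method is shaped as a hyperopt objective: it returns a dict with 'loss' and STATUS_OK, so it can be passed directly to fmin. A minimal usage sketch follows, assuming a tuner instance of the surrounding class with train_X/train_y already loaded and a hypothetical search space (the actual space used in tuning.py is not shown in this listing):

from hyperopt import fmin, tpe, hp, Trials

# Hypothetical XGBoost search space for a binary-classification metric that is minimized.
space = {
    'objective': 'binary:logistic',
    'eval_metric': 'logloss',
    'eta': hp.uniform('eta', 0.01, 0.3),
    'max_depth': hp.choice('max_depth', [4, 6, 8, 10]),
    'subsample': hp.uniform('subsample', 0.6, 1.0),
}

trials = Trials()
# 'tuner' stands in for an instance of the class that defines score().
best = fmin(fn=tuner.score, space=space, algo=tpe.suggest,
            max_evals=50, trials=trials)
print(best)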
Source: tuning.py (Python)