import numpy as np
import xgboost as xgb
from sklearn import metrics, model_selection

# pconvert, prestore and now are helper functions defined elsewhere in this script.
def xgb_common(train2, y, test2, v, z, N_seeds, N_splits, cname, xgb_params):
    scores = []
    skf = model_selection.StratifiedKFold(n_splits=N_splits, shuffle=True)
    dtest = xgb.DMatrix(test2)
    for s in range(N_seeds):
        # One prediction column per seed, e.g. cname + '0', cname + '1', ...
        cname2 = cname + str(s)
        v[cname2], z[cname2] = 0, 0
        xgb_params['seed'] = s + 4242
        for n, (itrain, ival) in enumerate(skf.split(train2, y)):
            dtrain = xgb.DMatrix(train2.iloc[itrain], y[itrain])
            dvalid = xgb.DMatrix(train2.iloc[ival], y[ival])
            watch = [(dtrain, 'train'), (dvalid, 'valid')]
            clf = xgb.train(xgb_params, dtrain, 10000, watch,
                            early_stopping_rounds=100, verbose_eval=False)
            # Store the out-of-fold predictions and this fold's test predictions.
            p = clf.predict(dvalid)
            v.loc[ival, cname2] += pconvert(p)
            score = metrics.log_loss(y[ival], p)
            z[cname2] += pconvert(clf.predict(dtest))
            print(cname, 'seed %d step %d of %d: ' % (xgb_params['seed'], n + 1, skf.n_splits), score, now())
            scores.append(score)
        # Average the test-set predictions over the folds of this seed.
        z[cname2] /= N_splits
    vloss = [metrics.log_loss(y, prestore(v[cname + str(i)])) for i in range(N_seeds)]
    print('validation loss: ', vloss, np.mean(vloss), np.std(vloss))
    cv = np.array(scores)
    print(cv, cv.mean(), cv.std())
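
For reference, a minimal usage sketch follows. It assumes train2/test2 are pandas DataFrames with a default integer index, y is an array of binary labels, and that the pconvert/prestore/now helpers are already defined earlier in the script; the column name 'xgb1' and the xgb_params values shown are placeholders, not tuned settings.

import pandas as pd

# Frames collecting out-of-fold (v) and averaged test (z) predictions per model.
v = pd.DataFrame(index=train2.index)
z = pd.DataFrame(index=test2.index)

xgb_params = {
    'objective': 'binary:logistic',   # placeholder parameters, not tuned values
    'eval_metric': 'logloss',
    'eta': 0.05,
    'max_depth': 4,
    'silent': 1,
}

xgb_common(train2, y, test2, v, z,
           N_seeds=4, N_splits=5,
           cname='xgb1', xgb_params=xgb_params)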