我想使用交叉验证优化 XGboost 的超参数。但是,尚不清楚如何从中获取模型xgb.cv
。例如,我objective(params)
从fmin
. 然后在 上拟dtrain
合并验证模型dvalid
。如果我想使用 KFold 交叉验证而不是训练dtrain
怎么办?
from hyperopt import fmin, tpe
import xgboost as xgb
params = {
'n_estimators' : hp.quniform('n_estimators', 100, 1000, 1),
'eta' : hp.quniform('eta', 0.025, 0.5, 0.025),
'max_depth' : hp.quniform('max_depth', 1, 13, 1)
#...
}
best = fmin(objective, space=params, algo=tpe.suggest)
def objective(params):
dtrain = xgb.DMatrix(X_train, label=y_train)
dvalid = xgb.DMatrix(X_valid, label=y_valid)
watchlist = [(dtrain, 'train'), (dvalid, 'eval')]
model = xgb.train(params, dtrain, num_boost_round,
evals=watchlist, feval=myFunc)
# xgb.cv(param, dtrain, num_boost_round, nfold = 5, seed = 0,
# feval=myFunc)