I am trying to double-check my cross-validation RMSE with tuneParams() and resample(), both from the mlr package, but I cannot get the two functions to produce the same result.
Tuning the parameters via the mlr package:
train <- cars
invisible(library(mlr))
invisible(library(mlrMBO))
invisible(library(doParallel))
set.seed(0)
# Learner
lrn <- makeLearner("regr.xgboost", par.vals = list(eta = 0.3, objective = "reg:linear"))
lrn <- makePreprocWrapperCaret(lrn, ppc.scale = TRUE, ppc.center = TRUE)
# Task
task <- makeRegrTask(data = train, target = "dist")
# Resampling strategy
cv_desc <- makeResampleDesc('CV', iters = 4)
cv_inst <- makeResampleInstance(cv_desc, task = task)
# Parameter set
ps <- makeParamSet(
  makeIntegerParam("nrounds", lower = 30, upper = 60),
  makeNumericParam("lambda", lower = 0, upper = 1),
  makeNumericParam("alpha", lower = 0, upper = 1)
)
# Control
mbo.ctrl <- makeMBOControl()
mbo.ctrl <- setMBOControlTermination(mbo.ctrl, iters = 50)
ctrl <- mlr:::makeTuneControlMBO(mbo.control = mbo.ctrl)
# Tune model:
cl <- makeCluster(detectCores(), type='PSOCK')
registerDoParallel(cl)
params_res <- tuneParams(lrn, task, cv_inst, par.set = ps, control = ctrl,
                         show.info = FALSE, measures = mlr::rmse)
registerDoSEQ()
print(params_res)
Trying to reproduce the RMSE with the resample() function:
set.seed(0)
lrn <- makeLearner("regr.xgboost", par.vals = params_res$x)
lrn <- makePreprocWrapperCaret(lrn, ppc.scale = TRUE, ppc.center = TRUE)
r = resample(lrn, task, cv_inst, measures = mlr::rmse)
mean(r$measures.test$rmse)
Best Answer
The aggregation measures used by mlr are described in the tutorial. For RMSE, test.rmse is used. This means that the per-fold test performances are themselves aggregated as an RMSE (square them, average over the folds, then take the root), not with an arithmetic mean.
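For illustration, here is a minimal sketch of the difference, using the per-fold RMSE values from the resample() output reproduced further below:
# Per-fold RMSEs copied from the resample() output below
fold_rmse <- c(17.0026234, 16.5500225, 21.5016809, 13.7344482)
# Arithmetic mean of the fold RMSEs -- what mean(r$measures.test$rmse) computes
mean(fold_rmse)            # approx. 17.197, does not match the tuning result
# test.rmse aggregation: square, average over the folds, then take the root
sqrt(mean(fold_rmse^2))    # approx. 17.4209, matches rmse.test.rmse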
train <- cars
#invisible(library(mlr))
invisible(library(mlrMBO))
#> Loading required package: mlr
#> Loading required package: ParamHelpers
#> Registered S3 methods overwritten by 'ggplot2':
#> method from
#> [.quosures rlang
#> c.quosures rlang
#> print.quosures rlang
#> Loading required package: smoof
#> Loading required package: BBmisc
#>
#> Attaching package: 'BBmisc'
#> The following object is masked from 'package:base':
#>
#> isFALSE
#> Loading required package: checkmate
invisible(library(doParallel))
#> Loading required package: foreach
#> Loading required package: iterators
#> Loading required package: parallel
set.seed(0)
# Learner
lrn <- makeLearner("regr.xgboost", par.vals = list(eta = 0.3, objective = "reg:linear"))
#> Warning in makeParam(id = id, type = "numeric", learner.param = TRUE, lower = lower, : NA used as a default value for learner parameter missing.
#> ParamHelpers uses NA as a special value for dependent parameters.
lrn <- makePreprocWrapperCaret(lrn, ppc.scale = TRUE, ppc.center = TRUE)
# Task
task <- makeRegrTask(data = train, target = "dist")
# Resampling strategy
cv_desc <- makeResampleDesc('CV', iters = 4)
cv_inst <- makeResampleInstance(cv_desc, task = task)
# Parameter set
ps <- makeParamSet(
  makeIntegerParam("nrounds", lower = 30, upper = 60),
  makeNumericParam("lambda", lower = 0, upper = 1),
  makeNumericParam("alpha", lower = 0, upper = 1)
)
# Control
mbo.ctrl <- makeMBOControl()
mbo.ctrl <- setMBOControlTermination(mbo.ctrl, iters = 50)
ctrl <- mlr:::makeTuneControlMBO(mbo.control = mbo.ctrl)
# Tune model:
cl <- makeCluster(detectCores(), type='PSOCK')
registerDoParallel(cl)
params_res <- tuneParams(lrn, task, cv_inst, par.set = ps, control = ctrl,
                         show.info = FALSE, measures = mlr::rmse)
registerDoSEQ()
print(params_res)
#> Tune result:
#> Op. pars: nrounds=30; lambda=0.994; alpha=1
#> rmse.test.rmse=17.4208912
lrn <- makeLearner("regr.xgboost", par.vals = params_res$x)
#> Warning in makeParam(id = id, type = "numeric", learner.param = TRUE, lower = lower, : NA used as a default value for learner parameter missing.
#> ParamHelpers uses NA as a special value for dependent parameters.
lrn <- makePreprocWrapperCaret(lrn, ppc.scale = TRUE, ppc.center = TRUE)
r = resample(lrn, task, cv_inst, measures = mlr::rmse)
#> Resampling: cross-validation
#> Measures: rmse
#> [Resample] iter 1: 17.0026234
#> [Resample] iter 2: 16.5500225
#> [Resample] iter 3: 21.5016809
#> [Resample] iter 4: 13.7344482
#>
#> Aggregated Result: rmse.test.rmse=17.4208912
#>
all.equal(as.numeric(r$aggr), as.numeric(params_res$y))
#> [1] TRUE
all.equal(sqrt(mean(r$measures.test$rmse^2)), as.numeric(params_res$y))
#> [1] TRUE
Created on 2019-05-18 by the reprex package (v0.2.1)
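If the arithmetic mean of the per-fold RMSEs is what you actually want to report, the aggregation of a measure can be changed with setAggregation(). A minimal sketch, reusing the lrn, task and cv_inst objects from above:
# Sketch: aggregate the fold RMSEs with the arithmetic mean instead of test.rmse
rmse_mean <- setAggregation(mlr::rmse, test.mean)
r2 <- resample(lrn, task, cv_inst, measures = rmse_mean)
# r2$aggr should then equal mean(r2$measures.test$rmse)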
A similar question, "r - mlr package: Cross-validation with tuneParams() and resample() yield different results", can be found on Stack Overflow: https://stackoverflow.com/questions/56197388/