I guess there are shorter solutions, but the following is not too hacky.
We use a wrapper to get hold of each model that gets trained, so we can store it in a list in the global environment. Alternatively, you can replace that line with something more sophisticated and save the models to disk instead (see the sketch at the end); this might be worthwhile because models can get quite big.
library(mlr)
# Define the tuning problem
ps = makeParamSet(
  makeDiscreteParam("C", values = 2^(-2:2)),
  makeDiscreteParam("sigma", values = 2^(-2:2))
)
ctrl = makeTuneControlGrid()
rdesc = makeResampleDesc("Holdout")
lrn = makeLearner("classif.ksvm")
# Define a wrapper to save all models that were trained with it
makeSaveWrapper = function(learner) {
  mlr:::makeBaseWrapper(
    id = paste(learner$id, "save", sep = "."),
    type = learner$type,
    next.learner = learner,
    par.set = makeParamSet(),
    par.vals = list(),
    learner.subclass = "SaveWrapper",
    model.subclass = "SaveModel")
}
trainLearner.SaveWrapper = function(.learner, .task, .subset, ...) {
  m = train(.learner$next.learner, task = .task, subset = .subset)
  stored.models <<- c(stored.models, list(m)) # not very efficient; you may want to save to disk here instead
  mlr:::makeChainModel(next.model = m, cl = "SaveModel")
}
predictLearner.SaveWrapper = function(.learner, .model, .newdata, ...) {
  NextMethod(.newdata = .newdata)
}
stored.models = list() # initialize empty list to store results
lrn.saver = makeSaveWrapper(lrn)
res = tuneParams(lrn.saver, task = iris.task, resampling = rdesc, par.set = ps, control = ctrl)
stored.models[[1]] # the normal mlr trained model (a WrappedModel)
stored.models[[1]]$learner.model # the underlying model
getLearnerParVals(stored.models[[1]]$learner) # the hyperparameter settings
stored.models[[1]]$subset # the indices used to train the model
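If the models are too big to keep in memory, here is a minimal sketch of the disk-based variant mentioned above. It replaces the global list assignment in trainLearner.SaveWrapper with saveRDS(); the directory name "stored_models" and the counter-based file naming are just assumptions, and it presumes tuning runs sequentially.

dir.create("stored_models", showWarnings = FALSE) # assumed output directory
model.counter = 0 # simple global counter for file names (sequential tuning assumed)
trainLearner.SaveWrapper = function(.learner, .task, .subset, ...) {
  m = train(.learner$next.learner, task = .task, subset = .subset)
  model.counter <<- model.counter + 1
  saveRDS(m, file = file.path("stored_models", sprintf("model_%03d.rds", model.counter)))
  mlr:::makeChainModel(next.model = m, cl = "SaveModel")
}
# load a single model back later, e.g.
# readRDS("stored_models/model_001.rds")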