#' CVvoting_core
#'
#' "voting" cross-validation method, added here as an example.
#' On each fold, every parameter achieving the smallest test error receives
#' one vote; after all folds, the parameter with the most votes is retained
#' (ties broken at random) and the model is refit on the full data.
#' Parameters are described in ?agghoo and ?AgghooCV
#'
#' @param data Matrix or data.frame of inputs, one row per observation.
#' @param target Vector of targets, aligned with the rows of `data`.
#' @param task Task type, as described in ?agghoo.
#' @param gmodel Generic model specification, forwarded to agghoo::Model$new.
#' @param params List of candidate hyper-parameters (see ?agghoo).
#' @param loss Function(prediction, truth) returning a numeric error.
#' @param CV Cross-validation description, normalized by checkCV.
#'
#' @return A list with the selected fitted model (`$model`) and its
#'   hyper-parameter (`$param`).
CVvoting_core <- function(data, target, task, gmodel, params, loss, CV) {
  CV <- checkCV(CV)
  n <- nrow(data)
  shuffle_inds <- NULL
  if (CV$type == "vfold" && CV$shuffle)
    shuffle_inds <- sample(n, n)
  # Build the generic model wrapper FIRST: the raw 'gmodel' argument is a
  # specification without an 'nmodels' field, so the vote counter can only
  # be sized after construction. (Bug fix: bestP was initialized before
  # this call, reading $nmodels from the unwrapped argument.)
  gmodel <- agghoo::Model$new(data, target, task, gmodel, params)
  bestP <- rep(0, gmodel$nmodels)
  for (v in seq_len(CV$V)) {
    test_indices <- get_testIndices(n, CV, v, shuffle_inds)
    d <- splitTrainTest(data, target, test_indices)
    # Collect every parameter index achieving the minimal error on this fold
    best_p <- NULL
    best_error <- Inf
    for (p in seq_len(gmodel$nmodels)) {
      model_pred <- gmodel$get(d$dataTrain, d$targetTrain, p)
      prediction <- model_pred(d$dataTest)
      error <- loss(prediction, d$targetTest)
      if (error <= best_error) {
        if (error == best_error)
          # Exact tie with the current best: p joins the winners
          best_p[[length(best_p) + 1]] <- p
        else {
          # Strictly better: restart the winners list from p alone
          best_p <- list(p)
          best_error <- error
        }
      }
    }
    # One vote per winning parameter on this fold
    for (p in best_p)
      bestP[p] <- bestP[p] + 1
  }
  # Choose a param at random in case of ex-aequos.
  # NOTE: guard against R's sample() footgun — with a single candidate k,
  # sample(k, 1) would draw uniformly from 1:k instead of returning k.
  candidates <- which(bestP == max(bestP))
  chosenP <-
    if (length(candidates) == 1) candidates else sample(candidates, 1)
  list(model=gmodel$get(data, target, chosenP), param=gmodel$getParam(chosenP))
}
+