'R6_AgghooCV.R'
'R6_Model.R'
'A_NAMESPACE.R'
+ 'checks.R'
+ 'compareTo.R'
+ 'utils.R'
#' @param task "regression" or "classification"
#' @param gmodel Generic model returning a predictive function
#' @param loss Function assessing the error of a prediction
- initialize = function(data, target, task, gmodel, loss = NULL) {
+ initialize = function(data, target, task, gmodel, loss) {
private$data <- data
private$target <- target
private$task <- task
private$gmodel <- gmodel
- if (is.null(loss))
- loss <- private$defaultLoss
private$loss <- loss
},
#' @description Fit an agghoo model.
#' - test_size: proportion of data in the test dataset, for MC
#' (irrelevant for V-fold). Default: 0.2. \cr
#' - shuffle: whether or not to shuffle data before V-fold.
- #' Irrelevant for Monte-Carlo; default: TRUE
- fit = function(
- CV = list(type = "MC",
- V = 10,
- test_size = 0.2,
- shuffle = TRUE)
- ) {
- if (!is.list(CV))
- stop("CV: list of type, V, [test_size], [shuffle]")
+ #' Irrelevant for Monte-Carlo; default: TRUE \cr
+ #' Default (if NULL): type="MC", V=10, test_size=0.2
+ fit = function(CV = NULL) {
+ CV <- checkCV(CV)
n <- nrow(private$data)
shuffle_inds <- NULL
if (CV$type == "vfold" && CV$shuffle)
  shuffle_inds <- sample(n, n)
for (v in seq_len(CV$V)) {
# Prepare train / test data and target, from full dataset.
# dataHO: "data Hold-Out" etc.
- test_indices <- private$get_testIndices(CV, v, n, shuffle_inds)
- dataHO <- private$data[-test_indices,]
- testX <- private$data[test_indices,]
- targetHO <- private$target[-test_indices]
- testY <- private$target[test_indices]
- # [HACK] R will cast 1-dim matrices into vectors:
- if (!is.matrix(dataHO) && !is.data.frame(dataHO))
- dataHO <- as.matrix(dataHO)
- if (!is.matrix(testX) && !is.data.frame(testX))
- testX <- as.matrix(testX)
+ test_indices <- get_testIndices(n, CV, v, shuffle_inds)
+ d <- splitTrainTest(private$data, private$target, test_indices)
best_model <- NULL
best_error <- Inf
for (p in seq_len(private$gmodel$nmodels)) {
- model_pred <- private$gmodel$get(dataHO, targetHO, p)
- prediction <- model_pred(testX)
- error <- private$loss(prediction, testY)
+ model_pred <- private$gmodel$get(d$dataTrain, d$targetTrain, p)
+ prediction <- model_pred(d$dataTest)
+ error <- private$loss(prediction, d$targetTest)
if (error <= best_error) {
newModel <- list(model=model_pred, param=private$gmodel$getParam(p))
if (error == best_error)
best_model[[length(best_model)+1]] <- newModel
else {
  best_model <- list(newModel)
  best_error <- error
}
}
V <- length(private$pmodels)
- oneLineX <- t(as.matrix(X[1,]))
+ oneLineX <- X[1,]
+ if (is.matrix(X))
+ # HACK: a matrix row X[1,] drops to a plain vector (a data.frame row
+ # does not); rebuild a 1-row matrix in that case.
+ oneLineX <- t(as.matrix(oneLineX))
if (length(private$pmodels[[1]]$model(oneLineX)) >= 2)
# Soft classification:
return (Reduce("+", lapply(private$pmodels, function(m) m$model(X))) / V)
task = NULL,
gmodel = NULL,
loss = NULL,
- pmodels = NULL,
- get_testIndices = function(CV, v, n, shuffle_inds) {
- if (CV$type == "vfold") {
- # Slice indices (optionnally shuffled)
- first_index = round((v-1) * n / CV$V) + 1
- last_index = round(v * n / CV$V)
- test_indices = first_index:last_index
- if (!is.null(shuffle_inds))
- test_indices <- shuffle_inds[test_indices]
- }
- else
- # Monte-Carlo cross-validation
- test_indices = sample(n, round(n * CV$test_size))
- test_indices
- },
- defaultLoss = function(y1, y2) {
- if (private$task == "classification") {
- if (is.null(dim(y1)))
- # Standard case: "hard" classification
- mean(y1 != y2)
- else {
- # "Soft" classification: predict() outputs a probability matrix
- # In this case "target" could be in matrix form.
- if (!is.null(dim(y2)))
- mean(rowSums(abs(y1 - y2)))
- else {
- # Or not: y2 is a "factor".
- y2 <- as.character(y2)
- # NOTE: the user should provide target in matrix form because
- # matching y2 with columns is rather inefficient!
- names <- colnames(y1)
- positions <- list()
- for (idx in seq_along(names))
- positions[[ names[idx] ]] <- idx
- mean(vapply(
- seq_along(y2),
- function(idx) sum(abs(y1[idx,] - positions[[ y2[idx] ]])),
- 0))
- }
- }
- }
- else
- # Regression
- mean(abs(y1 - y2))
- }
+ pmodels = NULL
)
)
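To see what the soft-classification branch of predict() computes, here is a standalone sketch of the aggregation step (toy probability matrices for two models and two classes; all values invented):

m1 <- matrix(c(0.9, 0.1, 0.2, 0.8), nrow=2, byrow=TRUE)
m2 <- matrix(c(0.7, 0.3, 0.4, 0.6), nrow=2, byrow=TRUE)
Reduce("+", list(m1, m2)) / 2  # rows (0.8, 0.2) and (0.3, 0.7)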
#' Journal of Machine Learning Research 22(20):1--55, 2021.
#'
#' @export
-agghoo <- function(data, target, task = NULL, gmodel = NULL, params = NULL, loss = NULL) {
+agghoo <- function(
+ data, target, task = NULL, gmodel = NULL, params = NULL, loss = NULL
+) {
# Args check:
- if (!is.data.frame(data) && !is.matrix(data))
- stop("data: data.frame or matrix")
- if (is.data.frame(target) || is.matrix(target)) {
- if (nrow(target) != nrow(data) || ncol(target) == 1)
- stop("target probability matrix does not match data size")
- }
- else if (!is.numeric(target) && !is.factor(target) && !is.character(target))
- stop("target: numeric, factor or character vector")
- if (!is.null(task))
- task = match.arg(task, c("classification", "regression"))
- if (is.character(gmodel))
- gmodel <- match.arg(gmodel, c("knn", "ppr", "rf", "tree"))
- else if (!is.null(gmodel) && !is.function(gmodel))
- # No further checks here: fingers crossed :)
- stop("gmodel: function(dataHO, targetHO, param) --> function(X) --> y")
- if (is.numeric(params) || is.character(params))
- params <- as.list(params)
- if (!is.list(params) && !is.null(params))
- stop("params: numerical, character, or list (passed to model)")
- if (is.function(gmodel) && !is.list(params))
- stop("params must be provided when using a custom model")
- if (is.list(params) && is.null(gmodel))
- stop("model (or family) must be provided when using custom params")
- if (!is.null(loss) && !is.function(loss))
- # No more checks here as well... TODO:?
- stop("loss: function(y1, y2) --> Real")
+ checkDaTa(data, target)
+ task <- checkTask(task, target)
+ modPar <- checkModPar(gmodel, params)
+ loss <- checkLoss(loss, task)
- if (is.null(task)) {
- if (is.numeric(target))
- task = "regression"
- else
- task = "classification"
- }
# Build Model object (= list of parameterized models)
- model <- Model$new(data, target, task, gmodel, params)
+ model <- Model$new(data, target, task, modPar$gmodel, modPar$params)
+
# Return AgghooCV object, to run and predict
AgghooCV$new(data, target, task, model, loss)
}
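A hedged usage sketch of the resulting pipeline ('knn' is one of the built-in families matched by checkModPar; the parameter grid, presumably neighbor counts, is illustrative):

a <- agghoo(iris[,-5], iris[,5], gmodel="knn", params=c(5, 7, 9))
a$fit(list(type="vfold", V=5))  # or a$fit() for the Monte-Carlo defaults
p <- a$predict(iris[,-5])
mean(p != iris[,5])             # resubstitution error, as a quick sanity check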
--- /dev/null
+defaultLoss_classif <- function(y1, y2) {
+ if (is.null(dim(y1)))
+ # Standard case: "hard" classification
+ mean(y1 != y2)
+ else {
+ # "Soft" classification: predict() outputs a probability matrix
+ # In this case "target" could be in matrix form.
+ if (!is.null(dim(y2)))
+ mean(rowSums(abs(y1 - y2)))
+ else {
+ # Or not: y2 is a "factor".
+ y2 <- as.character(y2)
+ # NOTE: the user should provide target in matrix form because
+ # matching y2 with columns is rather inefficient!
+ names <- colnames(y1)
+ positions <- list()
+ for (idx in seq_along(names))
+ positions[[ names[idx] ]] <- idx
+      mean(vapply(
+        seq_along(y2),
+        function(idx) {
+          # L1 distance to the one-hot encoding of the true class:
+          oneHot <- as.numeric(seq_along(names) == positions[[ y2[idx] ]])
+          sum(abs(y1[idx,] - oneHot))
+        },
+        0))
+ }
+ }
+}
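A minimal check of the probability-matrix branch above (toy values, invented class names):

y1 <- matrix(c(0.8, 0.2, 0.3, 0.7), nrow=2, byrow=TRUE,
             dimnames=list(NULL, c("a", "b")))
y2 <- matrix(c(1, 0, 0, 1), nrow=2, byrow=TRUE)  # one-hot targets
defaultLoss_classif(y1, y2)  # mean(c(0.4, 0.6)) = 0.5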
+
+defaultLoss_regress <- function(y1, y2) {
+ mean(abs(y1 - y2))
+}
+
+# TODO: allow strings like "MSE", "abs" etc
+checkLoss <- function(loss, task) {
+ if (!is.null(loss) && !is.function(loss))
+ stop("loss: function(y1, y2) --> Real")
+ if (is.null(loss)) {
+ loss <- if (task == "classification") {
+ defaultLoss_classif
+ } else {
+ defaultLoss_regress
+ }
+ }
+ loss
+}
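Any function(y1, y2) returning a scalar passes checkLoss; for instance a user-supplied RMSE (an illustrative choice, not a package default):

rmse <- function(y1, y2) sqrt(mean((y1 - y2)^2))
checkLoss(rmse, "regression")  # returned unchanged
checkLoss(NULL, "regression")  # falls back to defaultLoss_regress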
+
+checkCV <- function(CV) {
+ if (is.null(CV))
+ CV <- list(type="MC", V=10, test_size=0.2, shuffle=TRUE)
+ else {
+ if (!is.list(CV))
+ stop("CV: list of type('MC'|'vfold'), V(integer, [test_size, shuffle]")
+ if (is.null(CV$type)) {
+ warning("CV$type not provided: set to MC")
+ CV$type <- "MC"
+ }
+ if (is.null(CV$V)) {
+ warning("CV$V not provided: set to 10")
+ CV$V <- 10
+ }
+ if (CV$type == "MC" && is.null(CV$test_size))
+ CV$test_size <- 0.2
+ if (CV$type == "vfold" && is.null(CV$shuffle))
+ CV$shuffle <- TRUE
+ }
+ CV
+}
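How checkCV normalizes its argument, given the defaults stated above:

checkCV(NULL)
# list(type="MC", V=10, test_size=0.2, shuffle=TRUE)
checkCV(list(type="vfold", V=5))
# shuffle completed to TRUE; test_size left unset (irrelevant for v-fold)
checkCV(list(V=5))
# warns that CV$type is missing, then sets type="MC" and test_size=0.2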
+
+checkDaTa <- function(data, target) {
+ if (!is.data.frame(data) && !is.matrix(data))
+ stop("data: data.frame or matrix")
+ if (is.data.frame(target) || is.matrix(target)) {
+ if (!is.numeric(target))
+ stop("multi-columns target must be a probability matrix")
+ if (nrow(target) != nrow(data) || ncol(target) == 1)
+ stop("target probability matrix does not match data size")
+ }
+ else if (!is.numeric(target) && !is.factor(target) && !is.character(target))
+ stop("target: numeric, factor or character vector")
+}
+
+checkTask <- function(task, target) {
+  if (!is.null(task))
+    return (match.arg(task, c("classification", "regression")))
+  # task not provided: infer it from the type of the target
+  ifelse(is.numeric(target), "regression", "classification")
+}
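A quick illustration on built-in data:

checkTask(NULL, iris$Species)        # "classification" (factor target)
checkTask(NULL, iris$Sepal.Length)   # "regression" (numeric target)
checkTask("reg", iris$Sepal.Length)  # partial matching: "regression"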
+
+checkModPar <- function(gmodel, params) {
+ if (is.character(gmodel))
+ gmodel <- match.arg(gmodel, c("knn", "ppr", "rf", "tree"))
+ else if (!is.null(gmodel) && !is.function(gmodel))
+ stop("gmodel: function(dataHO, targetHO, param) --> function(X) --> y")
+ if (is.numeric(params) || is.character(params))
+ params <- as.list(params)
+ if (!is.list(params) && !is.null(params))
+ stop("params: numerical, character, or list (passed to model)")
+ if (is.function(gmodel) && !is.list(params))
+ stop("params must be provided when using a custom model")
+ if (is.list(params) && is.null(gmodel))
+ stop("model (or family) must be provided when using custom params")
+ list(gmodel=gmodel, params=params)
+}
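A custom family only needs the documented contract function(dataHO, targetHO, param) --> function(X) --> y. A minimal sketch, assuming (illustratively) that 'param' indexes the explanatory column:

gmodel_lm <- function(dataHO, targetHO, param) {
  df <- data.frame(x = dataHO[, param], y = targetHO)
  fit <- lm(y ~ x, data = df)
  function(X) predict(fit, data.frame(x = X[, param]))
}
mp <- checkModPar(gmodel_lm, params = 1:3)  # params coerced to a list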
-standardCV_core <- function(data, target, task = NULL, gmodel = NULL, params = NULL,
- loss = NULL, CV = list(type = "MC", V = 10, test_size = 0.2, shuffle = TRUE)
-) {
- if (!is.null(task))
- task = match.arg(task, c("classification", "regression"))
- if (is.character(gmodel))
- gmodel <- match.arg(gmodel, c("knn", "ppr", "rf", "tree"))
- if (is.numeric(params) || is.character(params))
- params <- as.list(params)
- if (is.null(task)) {
- if (is.numeric(target))
- task = "regression"
- else
- task = "classification"
- }
-
- if (is.null(loss)) {
- loss <- function(y1, y2) {
- if (task == "classification") {
- if (is.null(dim(y1)))
- mean(y1 != y2)
- else {
- if (!is.null(dim(y2)))
- mean(rowSums(abs(y1 - y2)))
- else {
- y2 <- as.character(y2)
- names <- colnames(y1)
- positions <- list()
- for (idx in seq_along(names))
- positions[[ names[idx] ]] <- idx
- mean(vapply(
- seq_along(y2),
- function(idx) sum(abs(y1[idx,] - positions[[ y2[idx] ]])),
- 0))
- }
- }
- }
- else
- mean(abs(y1 - y2))
- }
- }
-
+standardCV_core <- function(data, target, task, gmodel, params, loss, CV) {
n <- nrow(data)
shuffle_inds <- NULL
if (CV$type == "vfold" && CV$shuffle)
shuffle_inds <- sample(n, n)
- get_testIndices <- function(v, shuffle_inds) {
- if (CV$type == "vfold") {
- first_index = round((v-1) * n / CV$V) + 1
- last_index = round(v * n / CV$V)
- test_indices = first_index:last_index
- if (!is.null(shuffle_inds))
- test_indices <- shuffle_inds[test_indices]
- }
- else
- test_indices = sample(n, round(n * CV$test_size))
- test_indices
- }
list_testinds <- list()
for (v in seq_len(CV$V))
- list_testinds[[v]] <- get_testIndices(v, shuffle_inds)
-
+ list_testinds[[v]] <- get_testIndices(n, CV, v, shuffle_inds)
gmodel <- agghoo::Model$new(data, target, task, gmodel, params)
best_error <- Inf
best_model <- NULL
for (p in seq_len(gmodel$nmodels)) {
- error <- 0
- for (v in seq_len(CV$V)) {
+ error <- Reduce('+', lapply(seq_len(CV$V), function(v) {
testIdx <- list_testinds[[v]]
- dataHO <- data[-testIdx,]
- testX <- data[testIdx,]
- targetHO <- target[-testIdx]
- testY <- target[testIdx]
- if (!is.matrix(dataHO) && !is.data.frame(dataHO))
- dataHO <- as.matrix(dataHO)
- if (!is.matrix(testX) && !is.data.frame(testX))
- testX <- as.matrix(testX)
- model_pred <- gmodel$get(dataHO, targetHO, p)
- prediction <- model_pred(testX)
- error <- error + loss(prediction, testY)
- }
+ d <- splitTrainTest(data, target, testIdx)
+ model_pred <- gmodel$get(d$dataTrain, d$targetTrain, p)
+ prediction <- model_pred(d$dataTest)
+ loss(prediction, d$targetTest)
+ }) )
if (error <= best_error) {
- newModel <- list(model=model_pred, param=gmodel$getParam(p))
+ newModel <- list(model=gmodel$get(data, target, p),
+ param=gmodel$getParam(p))
if (error == best_error)
best_model[[length(best_model)+1]] <- newModel
else {
  best_model <- list(newModel)
  best_error <- error
}
}
}
best_model[[ sample(length(best_model), 1) ]]
}
standardCV_run <- function(
- dataTrain, dataTest, targetTrain, targetTest, verbose, CV, floss, ...
+ dataTrain, dataTest, targetTrain, targetTest, CV, floss, verbose, ...
) {
- s <- standardCV_core(dataTrain, targetTrain, ...)
+ args <- list(...)
+ task <- checkTask(args$task, targetTrain)
+ modPar <- checkModPar(args$gmodel, args$params)
+ loss <- checkLoss(args$loss, task)
+ s <- standardCV_core(
+ dataTrain, targetTrain, task, modPar$gmodel, modPar$params, loss, CV)
if (verbose)
print(paste( "Parameter:", s$param ))
- ps <- s$model(test)
- err_s <- floss(ps, targetTest)
+ p <- s$model(dataTest)
+ err <- floss(p, targetTest)
if (verbose)
- print(paste("error CV:", err_s))
- invisible(c(errors, err_s))
+ print(paste("error CV:", err))
+ invisible(err)
}
agghoo_run <- function(
- dataTrain, dataTest, targetTrain, targetTest, verbose, CV, floss, ...
+ dataTrain, dataTest, targetTrain, targetTest, CV, floss, verbose, ...
) {
a <- agghoo(dataTrain, targetTrain, ...)
a$fit(CV)
pa <- a$predict(dataTest)
err <- floss(pa, targetTest)
if (verbose)
print(paste("error agghoo:", err))
+ invisible(err)
}
-# ... arguments passed to agghoo or any other procedure
+# ...: arguments passed to method_s (agghoo, standard CV, or any other method)
compareTo <- function(
- data, target, rseed=-1, verbose=TRUE, floss=NULL,
- CV = list(type = "MC",
- V = 10,
- test_size = 0.2,
- shuffle = TRUE),
- method_s=NULL, ...
+ data, target, method_s, rseed=-1, floss=NULL, verbose=TRUE, ...
) {
if (rseed >= 0)
set.seed(rseed)
n <- nrow(data)
test_indices <- sample( n, round(n / ifelse(n >= 500, 10, 5)) )
- trainData <- as.matrix(data[-test_indices,])
- trainTarget <- target[-test_indices]
- testData <- as.matrix(data[test_indices,])
- testTarget <- target[test_indices]
+ d <- splitTrainTest(data, target, test_indices)
+ CV <- checkCV(list(...)$CV)
# Set error function to be used on model outputs (not in core method)
+ task <- checkTask(list(...)$task, target)
if (is.null(floss)) {
floss <- function(y1, y2) {
ifelse(task == "classification", mean(y1 != y2), mean(abs(y1 - y2)))
}
}
# Run (and compare) all methods:
runOne <- function(o) {
- o(dataTrain, dataTest, targetTrain, targetTest, verbose, CV, floss, ...)
+ o(d$dataTrain, d$dataTest, d$targetTrain, d$targetTest,
+ CV, floss, verbose, ...)
}
+ errors <- c()
if (is.list(method_s))
errors <- sapply(method_s, runOne)
else if (is.function(method_s))
errors <- runOne(method_s)
- else
- errors <- c()
invisible(errors)
}
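A hedged example of a single comparison run (family and parameter grid are illustrative):

errs <- compareTo(iris[,-5], iris[,5], list(agghoo_run, standardCV_run),
                  rseed=42, gmodel="knn", params=c(5, 7, 9))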
# Run compareTo N times in parallel
+# ... : additional args to be passed to method_s
compareMulti <- function(
- data, target, N = 100, nc = NA,
- CV = list(type = "MC",
- V = 10,
- test_size = 0.2,
- shuffle = TRUE),
- method_s=NULL, ...
+ data, target, method_s, N=100, nc=NA, floss=NULL, ...
) {
+  if (!requireNamespace("parallel", quietly=TRUE))
+    stop("compareMulti requires the 'parallel' package")
if (is.na(nc))
nc <- parallel::detectCores()
+
+ # "One" comparison for each method in method_s (list)
compareOne <- function(n) {
print(n)
- compareTo(data, target, n, verbose=FALSE, CV, method_s, ...)
+ compareTo(data, target, method_s, n, floss, verbose=FALSE, ...)
}
+
errors <- if (nc >= 2) {
- require(parallel)
parallel::mclapply(1:N, compareOne, mc.cores = nc)
} else {
lapply(1:N, compareOne)
}
print("Errors:")
Reduce('+', errors) / N
}
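And a sketch of the aggregated comparison (N kept small for illustration; nc=2 forces the parallel branch):

res <- compareMulti(iris[,-5], iris[,5], list(agghoo_run, standardCV_run),
                    N=10, nc=2, gmodel="knn", params=c(5, 7, 9))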
-
-# TODO: unfinished !
--- /dev/null
+get_testIndices <- function(n, CV, v, shuffle_inds) {
+ if (CV$type == "vfold") {
+    # Slice indices (optionally shuffled)
+    first_index <- round((v-1) * n / CV$V) + 1
+    last_index <- round(v * n / CV$V)
+    test_indices <- first_index:last_index
+    if (!is.null(shuffle_inds))
+      test_indices <- shuffle_inds[test_indices]
+  }
+  else
+    # Monte-Carlo cross-validation
+    test_indices <- sample(n, round(n * CV$test_size))
+  test_indices
+}
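The v-fold slicing is deterministic when shuffle_inds is NULL; for example, n=10 points split into V=3 folds:

cv <- list(type="vfold", V=3)
lapply(1:3, function(v) get_testIndices(10, cv, v, NULL))
# fold test indices: 1:3, 4:7, 8:10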
+
+splitTrainTest <- function(data, target, testIdx) {
+ dataTrain <- data[-testIdx,]
+ targetTrain <- target[-testIdx]
+ dataTest <- data[testIdx,]
+ targetTest <- target[testIdx]
+ # [HACK] R will cast 1-dim matrices into vectors:
+ if (!is.matrix(dataTrain) && !is.data.frame(dataTrain))
+ dataTrain <- as.matrix(dataTrain)
+ if (!is.matrix(dataTest) && !is.data.frame(dataTest))
+ dataTest <- as.matrix(dataTest)
+ list(dataTrain=dataTrain, targetTrain=targetTrain,
+ dataTest=dataTest, targetTest=targetTest)
+}
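splitTrainTest also guards against R's dimension dropping on single-column data (the [HACK] above); a small check:

d <- splitTrainTest(matrix(1:10, ncol=1), 101:110, c(2, 4))
dim(d$dataTest)  # 2 x 1: still a matrix, not a plain vector
d$targetTrain    # 101 103 105 106 107 108 109 110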
\subsection{Method \code{new()}}{
Create a new AgghooCV object.
\subsection{Usage}{
-\if{html}{\out{<div class="r">}}\preformatted{AgghooCV$new(data, target, task, gmodel, loss = NULL)}\if{html}{\out{</div>}}
+\if{html}{\out{<div class="r">}}\preformatted{AgghooCV$new(data, target, task, gmodel, loss)}\if{html}{\out{</div>}}
}
\subsection{Arguments}{
\subsection{Method \code{fit()}}{
Fit an agghoo model.
\subsection{Usage}{
-\if{html}{\out{<div class="r">}}\preformatted{AgghooCV$fit(CV = list(type = "MC", V = 10, test_size = 0.2, shuffle = TRUE))}\if{html}{\out{</div>}}
+\if{html}{\out{<div class="r">}}\preformatted{AgghooCV$fit(CV = NULL)}\if{html}{\out{</div>}}
}
\subsection{Arguments}{
\if{html}{\out{<div class="arguments">}}
\describe{
\item{\code{CV}}{List describing cross-validation to run. Slots: \cr
-- type: 'vfold' or 'MC' for Monte-Carlo (default: MC) \cr
-- V: number of runs (default: 10) \cr
-- test_size: percentage of data in the test dataset, for MC \cr
- (irrelevant for V-fold). Default: 0.2.
-- shuffle: wether or not to shuffle data before V-fold.
- Irrelevant for Monte-Carlo; default: TRUE}
+ - type: 'vfold' or 'MC' for Monte-Carlo (default: MC) \cr
+ - V: number of runs (default: 10) \cr
+ - test_size: proportion of data in the test dataset, for MC
+ (irrelevant for V-fold). Default: 0.2. \cr
+ - shuffle: whether or not to shuffle data before V-fold.
+ Irrelevant for Monte-Carlo; default: TRUE \cr
+Default (if NULL): type="MC", V=10, test_size=0.2}
}
\if{html}{\out{</div>}}
}
An R6::AgghooCV object o. Then call o$fit(), and finally o$predict(newData).
}
\description{
-Run the agghoo procedure (or standard cross-validation).
+Run the (core) agghoo procedure.
Arguments specify the list of models, their parameters and the
cross-validation settings, among others.
}
Guillaume Maillard, Sylvain Arlot, Matthieu Lerasle. "Aggregated hold-out".
Journal of Machine Learning Research 22(20):1--55, 2021.
}
+\seealso{
+Function \code{\link{compareTo}}
+}