% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/agghoo.R
\name{agghoo}
\alias{agghoo}
\title{agghoo}
\usage{
agghoo(data, target, task = NULL, gmodel = NULL, params = NULL, loss = NULL)
}
\arguments{
\item{data}{Data frame or matrix containing the data, one observation
per row.}

\item{target}{The target values to predict. Generally a vector,
but possibly a matrix in the case of "soft classification".}

\item{task}{"classification" or "regression". Default:
regression if target is numerical, classification otherwise.}

\item{gmodel}{A "generic model": a function that, given a tuple
(dataHO, targetHO, param), returns a predict function taking X as its
only argument. 'HO' stands for 'Hold-Out', referring to
cross-validation, which is run over the list of 'param' values (see the
params argument). A custom gmodel is sketched at the end of the
examples. Default: see R6::Model.}

\item{params}{A list of parameters. Often each list element is a single
numerical value, but in general elements may be of any type.
Default: see R6::Model.}

\item{loss}{A function assessing the error of a prediction:
loss(y1, y2) compares a prediction y1 to known values y2 and returns a
real number (the error). Default: see R6::AgghooCV.}
}
\value{
An R6::AgghooCV object o. Then, call o$fit() and finally o$predict(newData).
}
\description{
Run the agghoo procedure (or standard cross-validation).
Arguments specify the list of models, their parameters and the
cross-validation settings, among others.
}
\examples{
# Regression:
a_reg <- agghoo(iris[,-c(2,5)], iris[,2])
a_reg$fit()
pr <- a_reg$predict(iris[,-c(2,5)] + rnorm(450, sd=0.1))
# Classification:
a_cla <- agghoo(iris[,-5], iris[,5])
a_cla$fit()
pc <- a_cla$predict(iris[,-5] + rnorm(600, sd=0.1))
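# Custom-model sketch: gmodel, params and loss used as documented above.
# The k-nearest-neighbours regressor via FNN::knn.reg and the values
# k = 3, 5, 7 are illustrative assumptions, not package defaults.
\dontrun{
gmodel_custom <- function(dataHO, targetHO, param) {
  # Fit on the hold-out training part; return a predict function of X.
  function(X) FNN::knn.reg(train = dataHO, test = X,
                           y = targetHO, k = param)$pred
}
a_cus <- agghoo(iris[,-c(2,5)], iris[,2],
                gmodel = gmodel_custom,
                params = list(3, 5, 7),
                loss = function(y1, y2) mean(abs(y1 - y2)))
a_cus$fit()
pcus <- a_cus$predict(iris[,-c(2,5)] + rnorm(450, sd=0.1))
}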
}
\references{
Guillaume Maillard, Sylvain Arlot, Matthieu Lerasle. "Aggregated hold-out".
Journal of Machine Learning Research 22(20):1--55, 2021.
}