#' @title R6 class representing a (generic) model.
#'
#' @description
#' "Model" class, containing a (generic) learning function, which from
#' data + target [+ params] returns a prediction function X --> y.
#' Parameters for cross-validation are either provided or estimated.
#' Model family can be chosen among "rf", "tree", "ppr" and "knn" for now.
#'
#' @importFrom FNN knn.reg
#' @importFrom class knn
#' @importFrom stats ppr
#' @importFrom randomForest randomForest
#' @importFrom rpart rpart
#' @importFrom caret var_seq
#'
#' @export
Model <- R6::R6Class("Model",
  public = list(
    #' @field nmodels Number of parameters (= number of [predictive] models)
    nmodels = NA,

    #' @description Create a new generic model.
    #' @param data Matrix or data.frame
    #' @param target Vector of targets (generally numeric or factor)
    #' @param task "regression" or "classification"
    #' @param gmodel Generic model returning a predictive function; chosen
    #'   automatically given data and target nature if not provided.
    #' @param params List of parameters for cross-validation (each defining a model)
    initialize = function(data, target, task, gmodel = NULL, params = NULL) {
      if (is.null(gmodel)) {
        # (Generic) model not provided: choose a default family from the data.
        all_numeric <- is.numeric(as.matrix(data))
        if (!all_numeric) {
          # At least one non-numeric column: use random forests or trees
          # TODO: 4 = arbitrary magic number...
          gmodel <- if (ncol(data) >= 4) "rf" else "tree"
        } else {
          # Numerical data
          gmodel <- if (task == "regression") "ppr" else "knn"
        }
      }
      if (is.null(params)) {
        # Here, gmodel is a string (= its family),
        # because a custom model must be given with its parameters.
        # task is forwarded so getParams can build a task-appropriate grid
        # (it was previously referenced out of scope there).
        params <- as.list(private$getParams(gmodel, data, target, task))
      }
      private$params <- params
      if (is.character(gmodel))
        gmodel <- private$getGmodel(gmodel, task)
      private$gmodel <- gmodel
      self$nmodels <- length(private$params)
    },

    #' @description
    #' Returns the model at index "index", trained on dataHO/targetHO.
    #' index is between 1 and self$nmodels.
    #' @param dataHO Matrix or data.frame
    #' @param targetHO Vector of targets (generally numeric or factor)
    #' @param index Index of the model in 1...nmodels
    get = function(dataHO, targetHO, index) {
      private$gmodel(dataHO, targetHO, private$params[[index]])
    }
  ),
  private = list(
    # No need to expose model or parameters list
    gmodel = NULL,
    params = NULL,

    # Main function: given a family, return a generic model, which in turn
    # will output a predictive model from data + target + params.
    getGmodel = function(family, task) {
      if (family == "tree") {
        function(dataHO, targetHO, param) {
          # "class" grows a classification tree, "anova" a regression tree
          method <- if (task == "classification") "class" else "anova"
          df <- data.frame(cbind(dataHO, target = targetHO))
          model <- rpart::rpart(target ~ ., df, method = method,
                                control = list(cp = param))
          function(X) predict(model, X)
        }
      }
      else if (family == "rf") {
        function(dataHO, targetHO, param) {
          # randomForest dispatches on the target type: force a factor for
          # classification so it does not silently fit a regression forest.
          if (task == "classification" && !is.factor(targetHO))
            targetHO <- as.factor(targetHO)
          model <- randomForest::randomForest(dataHO, targetHO, mtry = param)
          function(X) predict(model, X)
        }
      }
      else if (family == "ppr") {
        function(dataHO, targetHO, param) {
          model <- stats::ppr(dataHO, targetHO, nterms = param)
          function(X) predict(model, X)
        }
      }
      else if (family == "knn") {
        if (task == "classification") {
          function(dataHO, targetHO, param) {
            function(X) class::knn(dataHO, X, cl = targetHO, k = param)
          }
        }
        else {
          function(dataHO, targetHO, param) {
            function(X) FNN::knn.reg(dataHO, X, y = targetHO, k = param)$pred
          }
        }
      }
    },

    # Return a default list of parameters, given a gmodel family.
    # task is required by the "tree" and "rf" branches; it defaults to
    # "classification" (the value the old hard-coded code assumed), keeping
    # the previous behavior for any caller that omits it.
    getParams = function(family, data, target, task = "classification") {
      if (family == "tree") {
        # Run rpart once to obtain a CV grid for parameter cp
        df <- data.frame(cbind(data, target = target))
        ctrl <- list(
          minsplit = 2,
          minbucket = 1,
          maxcompete = 0,
          maxsurrogate = 0,
          usesurrogate = 0,
          xval = 0,
          surrogatestyle = 0,
          maxdepth = 30)
        # Fix: honor the task (was always "class", wrong for regression targets)
        method <- if (task == "classification") "class" else "anova"
        r <- rpart::rpart(target ~ ., df, method = method, control = ctrl)
        # Drop the first (trivial) cp value, keep the cp column
        cps <- r$cptable[-1, 1]
        if (length(cps) <= 11)
          return (cps)
        # Subsample ~11 cp values evenly across the whole table
        step <- (length(cps) - 1) / 10
        cps[unique(round(seq(1, length(cps), step)))]
      }
      else if (family == "rf") {
        p <- ncol(data)
        # Use caret package to obtain the CV grid of mtry values.
        # Fix: "classificaton" typo, and task was not in scope here before
        # (R6 private methods do not see initialize()'s locals).
        caret::var_seq(p, classification = (task == "classification"),
                       len = min(10, p - 1))
      }
      else if (family == "ppr")
        # This is nterms in ppr() function
        1:10
      else if (family == "knn") {
        n <- nrow(data)
        # Choose ~10 NN values
        K <- length(unique(target))
        if (n <= 10)
          return (1:(n-1))
        sqrt_n <- sqrt(n)
        step <- (2 * sqrt_n - 1) / 10
        grid <- unique(round(seq(1, 2 * sqrt_n, step)))
        if (K == 2) {
          # Common binary classification case: odd number of neighbors.
          # Fix: iterate over the actual grid length — unique(round(...)) may
          # yield fewer than 11 values, so the old 2:11 could index past the
          # end and grow the vector with NAs. (grid[1] is always 1, odd.)
          for (i in seq_along(grid)) {
            if (grid[i] %% 2 == 0)
              grid[i] <- grid[i] + 1 #arbitrary choice
          }
        }
        grid
      }
    }
  )
)