#' runValse
#'
#' Main function
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'DDSE', 'Djump', 'BIC' or 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 50
#' @param eps real, threshold to declare that the EM algorithm has converged, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 3
#' @param rank.min integer, minimum rank in the low rank procedure, by default = 1
#' @param rank.max integer, maximum rank in the low rank procedure, by default = 5
#' @param ncores_outer number of cores for the outer loop on k
#' @param ncores_inner number of cores for the inner loop on lambda
#' @param thresh real, threshold to declare a variable relevant, by default = 1e-8
#' @param grid_lambda a vector of regularization parameters if known, by default numeric(0)
#' @param size_coll_mod (Maximum) size of a collection of models, by default 50
#' @param fast TRUE to use compiled C code, FALSE for R code only
#' @param verbose TRUE to show some execution traces
#' @param plot TRUE to plot the selected models after run
#'
#' @return
#' The selected model, or the whole collection of models if it contains fewer
#' than 11 models (in that case Capushe cannot select one).
#'
#' @examples
#' n = 50; m = 10; p = 5
#' beta = array(0, dim=c(p,m,2))
#' beta[,,1] = 1
#' beta[,,2] = 2
#' data = generateXY(n, c(0.4,0.6), rep(0,p), beta, diag(0.5, p), diag(0.5, m))
#' X = data$X
#' Y = data$Y
#' res = runValse(X, Y, kmax = 5)
#' X <- matrix(runif(100), nrow=50)
#' Y <- matrix(runif(100), nrow=50)
#' res = runValse(X, Y)
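#'
#' ## Illustrative sketch (arbitrary settings, not a reference call): the
#' ## Lasso-Rank procedure with BIC model selection, reusing the X and Y
#' ## defined just above
#' res2 = runValse(X, Y, procedure = "LassoRank", selecMod = "BIC", rank.max = 3)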
#'
#' @export
runValse <- function(X, Y, procedure = "LassoMLE", selecMod = "DDSE", gamma = 1, mini = 10,
  maxi = 50, eps = 1e-04, kmin = 2, kmax = 3, rank.min = 1, rank.max = 5, ncores_outer = 1,
  ncores_inner = 1, thresh = 1e-08, grid_lambda = numeric(0), size_coll_mod = 50,
  fast = TRUE, verbose = FALSE, plot = TRUE)
{
  n <- nrow(X)
  p <- ncol(X)
  m <- ncol(Y)

  if (verbose)
    print("main loop: over all k and all lambda")

  if (ncores_outer > 1) {
    cl <- parallel::makeCluster(ncores_outer, outfile = "")
    parallel::clusterExport(cl = cl, envir = environment(), varlist = c("X",
      "Y", "procedure", "selecMod", "gamma", "mini", "maxi", "eps", "kmin",
      "kmax", "rank.min", "rank.max", "ncores_outer", "ncores_inner", "thresh",
      "size_coll_mod", "verbose", "p", "m"))
  }

  # Compute models with k components
  computeModels <- function(k)
  {
    if (ncores_outer > 1)
      require("valse") #nodes start with an empty environment

    if (verbose)
      print(paste("Parameters initialization for k =", k))
    # smallEM initializes parameters by k-means and regression model in each
    # component, doing this 20 times, and keeping the values maximizing the
    # likelihood after 10 iterations of the EM algorithm.
    P <- initSmallEM(k, X, Y, fast)
    if (length(grid_lambda) == 0)
    {
      grid_lambda <- computeGridLambda(P$phiInit, P$rhoInit, P$piInit, P$gamInit,
        X, Y, gamma, mini, maxi, eps, fast)
    }
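    # Keep at most size_coll_mod lambda values (evenly spaced indices over the
    # grid), so that the collection of models per k stays of bounded size.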
    if (length(grid_lambda) > size_coll_mod)
      grid_lambda <- grid_lambda[seq(1, length(grid_lambda), length.out = size_coll_mod)]

    if (verbose)
      print("Compute relevant parameters")
    # Select variables according to each regularization parameter from the grid;
    # S$selected corresponds to the selected variables.
    S <- selectVariables(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini, maxi,
      gamma, grid_lambda, X, Y, thresh, eps, ncores_inner, fast)

    if (procedure == "LassoMLE") {
      if (verbose)
        print("run the procedure Lasso-MLE")
      # Compute parameter estimates with the maximum likelihood estimator,
      # restricted to the selected variables.
      models <- constructionModelesLassoMLE(P$phiInit, P$rhoInit, P$piInit,
        P$gamInit, mini, maxi, gamma, X, Y, eps, S, ncores_inner, fast, verbose)
    } else {
      if (verbose)
        print("run the procedure Lasso-Rank")
      # Compute parameter estimates with the low rank estimator, restricted to
      # the selected variables.
      models <- constructionModelesLassoRank(S, k, mini, maxi, X, Y, eps, rank.min,
        rank.max, ncores_inner, fast, verbose)
    }
    # Note: some models may be NULL after running selectVariables; drop them.
    models <- models[sapply(models, function(cell) !is.null(cell))]
    models
  }

  # List (index k) of lists (index lambda) of models
  models_list <-
    if (ncores_outer > 1) {
      parallel::parLapply(cl, kmin:kmax, computeModels)
    } else {
      lapply(kmin:kmax, computeModels)
    }
  if (ncores_outer > 1)
    parallel::stopCluster(cl)

  if (!requireNamespace("capushe", quietly = TRUE))
  {
    warning("'capushe' not available: returning all models")
    return(models_list)
  }

  # Get summary 'tableauRecap' from models
  tableauRecap <- do.call(rbind, lapply(seq_along(models_list), function(i)
  {
    models <- models_list[[i]]
    # For a collection of models (same k, several lambda):
    LLH <- sapply(models, function(model) model$llh[1])
    k <- length(models[[1]]$pi)
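    # Model dimension used as complexity: each of the k clusters contributes
    # dim(model$rho)[1] covariance parameters, the nonzero coefficients of its
    # phi slice (slice 1 is used, the support being assumed common to all
    # clusters), and one mixing proportion; the "- 1" accounts for the
    # proportions summing to one.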
    sumPen <- sapply(models, function(model) k * (dim(model$rho)[1] + sum(model$phi[,
      , 1] != 0) + 1) - 1)
    data.frame(model = paste(i, ".", seq_along(models), sep = ""), pen = sumPen/n,
      complexity = sumPen, contrast = -LLH)
  }))
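  # tableauRecap follows the 4-column layout expected by capushe::capushe():
  # model label, penalty shape (here complexity/n), complexity, contrast (-log-likelihood).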
  tableauRecap <- tableauRecap[which(tableauRecap[, 4] != Inf), ]
  if (verbose)
    print(tableauRecap)

  if (nrow(tableauRecap) > 10) {
    modSel <- capushe::capushe(tableauRecap, n)
    indModSel <- if (selecMod == "DDSE")
    {
      as.numeric(modSel@DDSE@model)
    } else if (selecMod == "Djump")
    {
      as.numeric(modSel@Djump@model)
    } else if (selecMod == "BIC")
    {
      modSel@BIC_capushe$model
    } else if (selecMod == "AIC")
    {
      modSel@AIC_capushe$model
    }
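    # indModSel is a label of the form "i.j" as built above: i indexes the
    # collection of models (one per value of k) and j the model within it
    # (one per lambda).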
    listMod <- as.integer(unlist(strsplit(as.character(indModSel), "[.]")))
    modelSel <- models_list[[listMod[1]]][[listMod[2]]]
    modelSel$models <- tableauRecap

    if (plot)
      print(plot_valse(X, Y, modelSel))
    return(modelSel)
  }
  tableauRecap
}