#' valse
#'
#' Main function
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'DDSE', 'Djump', 'BIC' or 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 50
#' @param eps real, threshold to say the EM algorithm converges, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 4
#' @param rang.min integer, minimum rank in the low rank procedure, by default = 1
#' @param rang.max integer, maximum rank in the low rank procedure, by default = 10
#' @param ncores_outer integer, number of cores for the outer loop over k, by default = 1
#' @param ncores_inner integer, number of cores for the inner loop over lambda, by default = 1
#' @param size_coll_mod integer, maximum number of lambda values (hence of models) kept for each k, by default = 50
#' @param verbose TRUE to display execution traces, by default = FALSE
#'
#' @return a list with estimators of parameters of the selected model
#'   (or the whole list of models if the 'capushe' package is not available)
#'
#' @examples
#' #TODO: a few examples
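#' # A minimal sketch of a call, on random simulated data, only to illustrate
#' # the interface (not a meaningful statistical example):
#' \dontrun{
#' n <- 100; p <- 5; m <- 2
#' X <- matrix(rnorm(n*p), nrow=n, ncol=p)
#' Y <- matrix(rnorm(n*m), nrow=n, ncol=m)
#' res <- valse(X, Y, procedure='LassoMLE', selecMod='DDSE', verbose=TRUE)
#' }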
#' @export
valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
  eps=1e-4, kmin=2, kmax=4, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=1,
  size_coll_mod=50, verbose=FALSE)
{
  p = dim(X)[2]
  m = dim(Y)[2]
  n = dim(X)[1]

  if (verbose)
    print("main loop: over all k and all lambda")

  if (ncores_outer > 1)
  {
    cl = parallel::makeCluster(ncores_outer, outfile='')
    parallel::clusterExport( cl=cl, envir=environment(), varlist=c("X","Y","procedure",
      "selecMod","gamma","mini","maxi","eps","kmin","kmax","rang.min","rang.max",
      "ncores_outer","ncores_inner","size_coll_mod","verbose","p","m") )
  }

  # Compute models with k components
  computeModels <- function(k)
  {
    if (ncores_outer > 1)
      require("valse") #nodes start with an empty environment

    if (verbose)
      print(paste("Parameters initialization for k =",k))
    #smallEM initializes parameters by k-means and regression model in each component,
    #doing this 20 times, and keeping the values maximizing the likelihood after 10
    #iterations of the EM algorithm.
    P = initSmallEM(k, X, Y)
    grid_lambda <- computeGridLambda(P$phiInit, P$rhoInit, P$piInit, P$gamInit, X, Y,
      gamma, mini, maxi, eps)
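    #keep at most size_coll_mod lambda values, evenly subsampled along the grid,
    #to bound the size of the model collection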
    if (length(grid_lambda) > size_coll_mod)
      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = size_coll_mod)]

    if (verbose)
      print("Compute relevant parameters")
    #select variables according to each regularization parameter
    #from the grid: S$selected corresponding to selected variables
    S = selectVariables(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini, maxi, gamma,
      grid_lambda, X, Y, 1e-8, eps, ncores_inner) #TODO: 1e-8 as arg?! eps?

    if (procedure == 'LassoMLE')
    {
      if (verbose)
        print('run the procedure Lasso-MLE')
      #compute parameter estimations, with the Maximum Likelihood
      #Estimator, restricted on selected variables
      #(threshold 1e-8 hard-coded as in selectVariables above; was an undefined 'thresh')
      models <- constructionModelesLassoMLE(P$phiInit, P$rhoInit, P$piInit, P$gamInit,
        mini, maxi, gamma, X, Y, 1e-8, eps, S, ncores_inner, artefact = 1e3, verbose)
    }
    else
    {
      if (verbose)
        print('run the procedure Lasso-Rank')
      #compute parameter estimations, with the Low Rank
      #Estimator, restricted on selected variables
      #(S$selected assumed for the selected-variables argument; was an undefined 'A1')
      models <- constructionModelesLassoRank(S$Pi, S$Rho, mini, maxi, X, Y, eps,
        S$selected, rang.min, rang.max, ncores_inner, verbose)
    }
    #some models may be NULL after selectVariables; drop them
    models = models[sapply(models, function(cell) !is.null(cell))]
    models
  }

  # List (index k) of lists (index lambda) of models
  models_list <-
    if (ncores_outer > 1)
      parallel::parLapply(cl, kmin:kmax, computeModels)
    else
      lapply(kmin:kmax, computeModels)
  if (ncores_outer > 1)
    parallel::stopCluster(cl)

  if (! requireNamespace("capushe", quietly=TRUE))
  {
    warning("'capushe' not available: returning all models")
    return (models_list)
  }

  # Get summary "tableauRecap" from models
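  # Each row of tableauRecap describes one model:
  # (LLH, dimension D, number of clusters k, index of lambda within that k)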
  tableauRecap = do.call( rbind, lapply( models_list, function(models) {
    #For a group of models (same k, different lambda):
    llh = matrix(ncol = 2)
    for (l in seq_along(models))
      llh = rbind(llh, models[[l]]$llh) #TODO: LLF? harmonize between EMGLLF and EMGrank?
    LLH = llh[-1,1]
    D = llh[-1,2]
    k = length(models[[1]]$pi)
    cbind(LLH, D, rep(k, length(models)), 1:length(models))
  } ) )
  #remove rows with all-zero (D, k, index) and rows with infinite log-likelihood
  tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4])!=0,]
  tableauRecap = tableauRecap[(tableauRecap[,1])!=Inf,]
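  # capushe expects a 4-column 'data' argument (model name, penalty shape, complexity,
  # contrast); here the model dimension D is used as both the shape and the complexity.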
  data = cbind(1:dim(tableauRecap)[1], tableauRecap[,2], tableauRecap[,2], tableauRecap[,1])
  modSel = capushe::capushe(data, n)
  indModSel <-
    if (selecMod == 'DDSE')
      as.numeric(modSel@DDSE@model)
    else if (selecMod == 'Djump')
      as.numeric(modSel@Djump@model)
    else if (selecMod == 'BIC')
      modSel@BIC_capushe$model
    else if (selecMod == 'AIC')
      modSel@AIC_capushe$model
  #models_list is indexed from kmin, hence the shift on the cluster count k (column 3)
  models_list[[tableauRecap[indModSel,3] - kmin + 1]][[tableauRecap[indModSel,4]]]
}