[valse.git] / pkg / R / valse.R
#' Main function
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'DDSE', 'Djump', 'BIC' or 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 50
#' @param eps real, threshold to say the EM algorithm converges, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 3
#' @param rang.min integer, minimum rank in the low rank procedure, by default = 1
#' @param rang.max integer, maximum rank in the low rank procedure, by default = 10
#' @return a list with estimators of parameters
#' @export
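#' @examples
#' \dontrun{
#' ## Minimal usage sketch. The simulated data below (dimensions, Gaussian noise,
#' ## two balanced clusters of regression coefficients) is only an illustrative
#' ## assumption, not a dataset or a recommended setting from the package.
#' n = 100; p = 5; m = 3
#' X = matrix(rnorm(n * p), nrow = n, ncol = p)
#' z = rep(1:2, each = n / 2)
#' beta = list(matrix(1, p, m), matrix(-1, p, m))
#' Y = matrix(rnorm(n * m), n, m)
#' for (i in 1:n) Y[i, ] = Y[i, ] + X[i, ] %*% beta[[z[i]]]
#' res = valse(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE', kmin = 2, kmax = 3)
#' }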
#-----------------------------------------------------------------------
valse = function(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE', gamma = 1, mini = 10,
                 maxi = 50, eps = 1e-4, kmin = 2, kmax = 3,
                 rang.min = 1, rang.max = 10) {
  ##################################
  # core workflow: compute all models
  ##################################

  p = dim(X)[2]
  m = dim(Y)[2]
  n = dim(X)[1]

  model = list()
  tableauRecap = array(0, dim = c(1000, 4))
  cpt = 0
  print("main loop: over all k and all lambda")

  for (k in kmin:kmax){
    print(k)
    print("Parameters initialization")
    # smallEM initializes parameters by k-means and a regression model in each
    # component, doing this 20 times, and keeping the values maximizing the
    # likelihood after 10 iterations of the EM algorithm.
    init = initSmallEM(k, X, Y)
    phiInit <<- init$phiInit
    rhoInit <<- init$rhoInit
    piInit <<- init$piInit
    gamInit <<- init$gamInit
    # gridLambda() ships with the package, no need to source() it from a local path
    #source('~/valse/pkg/R/gridLambda.R')
    grid_lambda <<- gridLambda(phiInit, rhoInit, piInit, gamInit, X, Y, gamma, mini, maxi, eps)

    if (length(grid_lambda) > 50){
      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 50)]
    }
51 print("Compute relevant parameters")
52 #select variables according to each regularization parameter
53 #from the grid: A1 corresponding to selected variables, and
54 #A2 corresponding to unselected variables.
55
56 params = selectiontotale(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,grid_lambda,X,Y,1e-8,eps)
57 #params2 = selectVariables(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,grid_lambda[seq(1,length(grid_lambda), by=3)],X,Y,1e-8,eps)
58 ## etrange : params et params 2 sont différents ...
59
60 selected <<- params$selected
61 Rho <<- params$Rho
62 Pi <<- params$Pi
63
    if (procedure == 'LassoMLE') {
      print('run the procedure Lasso-MLE')
      # Compute parameter estimations with the Maximum Likelihood Estimator,
      # restricted to the selected variables. The threshold 1e-8 mirrors the
      # one passed to selectiontotale above.
      model[[k]] = constructionModelesLassoMLE(phiInit, rhoInit, piInit, gamInit, mini, maxi, gamma, X, Y, 1e-8, eps, selected)
      llh = matrix(ncol = 2)
      for (l in seq_along(model[[k]])){
        llh = rbind(llh, model[[k]][[l]]$llh)
      }
      LLH = llh[-1, 1]
      D = llh[-1, 2]
    } else {
      print('run the procedure Lasso-Rank')
      # Compute parameter estimations with the Low Rank Estimator,
      # restricted to the selected variables.
      model[[k]] = constructionModelesLassoRank(Pi, Rho, mini, maxi, X, Y, eps,
                                                selected, rang.min, rang.max)

      ################################################
      ### TODO: review what follows (unfinished)
      phi = runProcedure2()$phi
      Phi2 = Phi
      if (dim(Phi2)[1] == 0)
      {
        Phi[, , 1:k, ] <<- phi
      } else
      {
        Phi <<- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
        Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <<- Phi2
        Phi[, , 1:k, -(1:(dim(Phi2)[4]))] <<- phi
      }
    }
    tableauRecap[(cpt+1):(cpt+length(model[[k]])), ] = matrix(c(LLH, D, rep(k, length(model[[k]])), 1:length(model[[k]])), ncol = 4)
    cpt = cpt + length(model[[k]])
  }
  print('Model selection')
  tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4]) != 0, ]
  tableauRecap = tableauRecap[tableauRecap[, 1] != Inf, ]
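  # capushe() takes a 4-column matrix: model name, penalty shape, model
  # complexity, contrast. Here the dimension D (column 2 of tableauRecap) is
  # used for both the shape and the complexity, and the likelihood column
  # (column 1) as the contrast.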
  data = cbind(1:dim(tableauRecap)[1], tableauRecap[, 2], tableauRecap[, 2], tableauRecap[, 1])
  require(capushe)
  modSel = capushe(data, n)
  if (selecMod == 'DDSE') {
    indModSel = as.numeric(modSel@DDSE@model)
  } else if (selecMod == 'Djump') {
    indModSel = as.numeric(modSel@Djump@model)
  } else if (selecMod == 'BIC') {
    indModSel = modSel@BIC_capushe$model
  } else if (selecMod == 'AIC') {
    indModSel = modSel@AIC_capushe$model
  }
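  # Columns 3 and 4 of tableauRecap store, for each estimated model, the number
  # of clusters k and its index among the models computed for that k; they are
  # used to retrieve the selected model from the nested 'model' list.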
  return(model[[tableauRecap[indModSel, 3]]][[tableauRecap[indModSel, 4]]])
}