#' Main function
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'DDSE', 'Djump', 'BIC' or 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 50
#' @param eps real, threshold under which the EM algorithm is considered to have converged, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 2
#' @param rang.min integer, minimum rank in the low rank procedure, by default = 1
#' @param rang.max integer, maximum rank in the low rank procedure, by default = 10
#' @return a list with estimators of parameters
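#' @examples
#' # Minimal illustrative sketch (not run): X and Y are simulated here only to
#' # show the expected input shapes; replace them with real data in practice.
#' \dontrun{
#' n = 100; p = 5; m = 2
#' X = matrix(rnorm(n * p), nrow = n)
#' Y = matrix(rnorm(n * m), nrow = n)
#' res = valse(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE')
#' }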
#' @export
#-----------------------------------------------------------------------
valse = function(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE', gamma = 1, mini = 10,
                 maxi = 50, eps = 1e-4, kmin = 2, kmax = 2,
                 rang.min = 1, rang.max = 10) {
  ##################################
  # core workflow: compute all models
  ##################################

  p = dim(X)[2]
  m = dim(Y)[2]
  n = dim(X)[1]

  model = list()
  tableauRecap = array(0, dim = c(1000, 4))
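  # columns of tableauRecap: log-likelihood, model dimension D, number of
  # clusters k, and index of the model within model[[k]] (filled in the loop below)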
  cpt = 0
  print("main loop: over all k and all lambda")

  for (k in kmin:kmax) {
    print(k)
    print("Parameters initialization")
    # smallEM initializes parameters by k-means and a regression model in each
    # component, doing this 20 times, and keeping the values maximizing the
    # likelihood after 10 iterations of the EM algorithm.
    init = initSmallEM(k, X, Y)
    phiInit = init$phiInit
    rhoInit = init$rhoInit
    piInit = init$piInit
    gamInit = init$gamInit
    grid_lambda = gridLambda(phiInit, rhoInit, piInit, gamInit, X, Y, gamma, mini, maxi, eps)

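    # keep at most 100 values of lambda, evenly spaced over the computed grid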
    if (length(grid_lambda) > 100) {
      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 100)]
    }
    print("Compute relevant parameters")
    # select variables according to each regularization parameter in the grid:
    # 'selected' contains the selected variables, with the corresponding
    # estimates Rho and Pi.

    params = selectiontotale(phiInit, rhoInit, piInit, gamInit, mini, maxi, gamma, grid_lambda, X, Y, 1e-8, eps)
    #params2 = selectVariables(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,grid_lambda[seq(1,length(grid_lambda), by=3)],X,Y,1e-8,eps)
    ## strange: params and params2 are different ...
    selected = params$selected
    Rho = params$Rho
    Pi = params$Pi

    if (procedure == 'LassoMLE') {
      print('run the procedure Lasso-MLE')
      # compute parameter estimations, with the Maximum Likelihood
      # Estimator, restricted to the selected variables
      model[[k]] = constructionModelesLassoMLE(phiInit, rhoInit, piInit, gamInit, mini, maxi, gamma, X, Y, 1e-8, eps, selected)
      # collect, for each model obtained with this k, its log-likelihood and dimension
      llh = matrix(ncol = 2)
      for (l in seq_along(model[[k]])) {
        llh = rbind(llh, model[[k]][[l]]$llh)
      }
      LLH = llh[-1, 1]
      D = llh[-1, 2]
    } else {
      print('run the procedure Lasso-Rank')
      # compute parameter estimations, with the Low Rank
      # Estimator, restricted to the selected variables
      model[[k]] = constructionModelesLassoRank(Pi, Rho, mini, maxi, X, Y, eps,
                                                selected, rang.min, rang.max)

      ################################################
      ### TODO: review what follows (runProcedure2 is not defined in this file)
      phi = runProcedure2()$phi
      Phi2 = Phi
      if (dim(Phi2)[1] == 0)
      {
        Phi[, , 1:k, ] <<- phi
      } else
      {
        Phi <<- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
        Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <<- Phi2
        Phi[, , 1:k, -(1:(dim(Phi2)[4]))] <<- phi
      }
    }
    tableauRecap[(cpt+1):(cpt+length(model[[k]])), ] = matrix(c(LLH, D, rep(k, length(model[[k]])), 1:length(model[[k]])), ncol = 4)
    cpt = cpt + length(model[[k]])
  }
  print('Model selection')
  # drop unused rows and models with infinite log-likelihood
  tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4]) != 0, ]
  tableauRecap = tableauRecap[tableauRecap[, 1] != Inf, ]
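  # capushe expects a 4-column matrix: model label, penalty shape, model
  # complexity, and contrast value; here the dimension D plays the role of both
  # penalty shape and complexity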
  data = cbind(1:dim(tableauRecap)[1], tableauRecap[, 2], tableauRecap[, 2], tableauRecap[, 1])
  require(capushe)
  modSel = capushe(data, n)
  if (selecMod == 'DDSE') {
    indModSel = as.numeric(modSel@DDSE@model)
  } else if (selecMod == 'Djump') {
    indModSel = as.numeric(modSel@Djump@model)
  } else if (selecMod == 'BIC') {
    indModSel = as.numeric(modSel@BIC_capushe$model)
  } else if (selecMod == 'AIC') {
    indModSel = as.numeric(modSel@AIC_capushe$model)
  }
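  # columns 3 and 4 of tableauRecap hold the number of clusters k and the index
  # of the model within model[[k]], which identify the selected model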
  return(model[[tableauRecap[indModSel, 3]]][[tableauRecap[indModSel, 4]]])
}