e5205a5e61a28cae4d237756471f8cb020f1dee2
[valse.git] / R / valse.R
#' Main function
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'SlopeHeuristic', 'BIC', 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 100
#' @param eps real, threshold to say the EM algorithm converges, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 10
#' @param rang.min integer, minimum rank in the low rank procedure, by default = 1
#' @param rang.max integer, maximum rank in the low rank procedure, by default = 10
#' @param thresh real, threshold under which a coefficient is considered zero, by default = 1e-8
#' @return a list with estimators of parameters (Phi, Rho, Pi)
#' @export
#-----------------------------------------------------------------------
valse <- function(X, Y, procedure, selecMod, gamma = 1, mini = 10,
                  maxi = 100, eps = 1e-4, kmin = 2, kmax = 10,
                  rang.min = 1, rang.max = 10, thresh = 1e-8) {
  ##################################
  # core workflow: compute all models
  ##################################

  # Accumulators over the numbers of clusters k. They start empty and grow
  # along their last dimension as new models are computed. (The original
  # relied on undefined globals and `<<-` superassignment; everything is
  # kept local here.)
  Phi <- NULL
  Rho <- NULL
  Pi <- NULL

  print("main loop: over all k and all lambda")
  for (k in kmin:kmax) {
    print(k)

    print("Parameters initialization")
    # smallEM initializes parameters by k-means and regression model in each
    # component, doing this 20 times, and keeping the values maximizing the
    # likelihood after 10 iterations of the EM algorithm.
    init <- initSmallEM(k, X, Y)
    phiInit <- init$phiInit
    rhoInit <- init$rhoInit
    piInit <- init$piInit
    gamInit <- init$gamInit

    # Dimensions must be read AFTER initialization: the original read
    # dim(phiInit) before phiInit was ever assigned.
    p <- dim(phiInit)[1]
    m <- dim(phiInit)[2]

    # NOTE(review): the original passed an undefined `tauInit` here and
    # below; gamInit is the only initialized quantity left unused, so it is
    # presumably what was meant -- confirm against gridLambda's signature.
    grid.lambda <- gridLambda(phiInit, rhoInit, piInit, gamInit,
                              X, Y, gamma, mini, maxi, eps)

    print("Compute relevant parameters")
    # Select variables according to each regularization parameter from the
    # grid: A1 holds the selected variables, A2 the unselected ones.
    params <- selectiontotale(phiInit, rhoInit, piInit, gamInit,
                              mini, maxi, gamma, grid.lambda,
                              X, Y, thresh, eps)
    A1 <- params$A1
    A2 <- params$A2
    Rho <- params$Rho
    Pi <- params$Pi

    if (procedure == 'LassoMLE') {
      print('run the procedure Lasso-MLE')
      # Compute parameter estimations with the Maximum Likelihood Estimator,
      # restricted on selected variables.
      model <- constructionModelesLassoMLE(
        phiInit, rhoInit, piInit, gamInit, mini, maxi,
        gamma, grid.lambda, X, Y, thresh, eps, A1, A2)
      ################################################
      ### Regarder la SUITE  (TODO: review what follows)
      r1 <- runProcedure1()
      Phi2 <- Phi
      Rho2 <- Rho
      Pi2 <- Pi

      if (is.null(dim(Phi2))) {
        # First batch of models: create the accumulators.
        Phi <- array(0., dim = c(p, m, kmax, dim(r1$phi)[4]))
        Phi[, , 1:k, ] <- r1$phi
        Rho <- array(0., dim = c(m, m, kmax, dim(r1$rho)[4]))
        Rho[, , 1:k, ] <- r1$rho
        Pi <- array(0., dim = c(kmax, dim(r1$pi)[2]))
        Pi[1:k, ] <- r1$pi
      } else {
        # Append the new models after the previously accumulated ones.
        nPhi <- dim(Phi2)[4]
        Phi <- array(0., dim = c(p, m, kmax, nPhi + dim(r1$phi)[4]))
        Phi[, , 1:(dim(Phi2)[3]), 1:nPhi] <- Phi2
        Phi[, , 1:k, -(1:nPhi)] <- r1$phi
        nRho <- dim(Rho2)[4]
        Rho <- array(0., dim = c(m, m, kmax, nRho + dim(r1$rho)[4]))
        Rho[, , 1:(dim(Rho2)[3]), 1:nRho] <- Rho2
        Rho[, , 1:k, -(1:nRho)] <- r1$rho
        Pi <- array(0., dim = c(kmax, dim(Pi2)[2] + dim(r1$pi)[2]))
        Pi[1:nrow(Pi2), 1:ncol(Pi2)] <- Pi2
        Pi[1:k, -(1:ncol(Pi2))] <- r1$pi
      }
    } else {
      print('run the procedure Lasso-Rank')
      # Compute parameter estimations with the Low Rank Estimator,
      # restricted on selected variables.
      # (Fixed: the original passed undefined `rank.min`/`rank.max` instead
      # of the `rang.min`/`rang.max` arguments.)
      model <- constructionModelesLassoRank(Pi, Rho, mini, maxi, X, Y, eps,
                                            A1, rang.min, rang.max)

      ################################################
      ### Regarder la SUITE  (TODO: review what follows)
      phi <- runProcedure2()$phi
      Phi2 <- Phi
      if (is.null(dim(Phi2))) {
        # First batch of models: create the accumulator.
        Phi <- array(0, dim = c(p, m, kmax, dim(phi)[4]))
        Phi[, , 1:k, ] <- phi
      } else {
        # Append the new models after the previously accumulated ones.
        Phi <- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
        Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <- Phi2
        Phi[, , 1:k, -(1:(dim(Phi2)[4]))] <- phi
      }
    }
  }
  print('Model selection')
  if (selecMod == 'SlopeHeuristic') {
    # TODO: slope heuristic selection not yet implemented
  } else if (selecMod == 'BIC') {
    # TODO: BIC selection not yet implemented
  } else if (selecMod == 'AIC') {
    # TODO: AIC selection not yet implemented
  }

  # Return every computed estimator; the selection step above is still a
  # stub, as in the original.
  list(Phi = Phi, Rho = Rho, Pi = Pi)
}