pkg/R/main.R
#' valse
#'
#' Main function
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'DDSE', 'Djump', 'BIC' or 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 50
#' @param eps real, convergence threshold for the EM algorithm, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 4
#' @param rang.min integer, minimum rank in the low rank procedure, by default = 1
#' @param rang.max integer, maximum rank in the low rank procedure, by default = 10
#' @param ncores_outer Number of cores for the outer loop on k
#' @param ncores_inner Number of cores for the inner loop on lambda
#' @param size_coll_mod (Maximum) size of a collection of models
#' @param fast TRUE to use compiled C code, FALSE for R code only
#' @param verbose TRUE to show some execution traces
#' @param plot TRUE to plot the results (calls plot_valse)
#'
#' @return a list with estimators of parameters
#'
#' @examples
#' #TODO: a few examples
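#' # Minimal usage sketch (not run): simulated data of arbitrary size, only to
#' # illustrate the calling convention; it is not a meaningful analysis.
#' \dontrun{
#' n <- 50; p <- 5; m <- 2
#' X <- matrix(rnorm(n*p), nrow = n)
#' Y <- matrix(rnorm(n*m), nrow = n)
#' res <- valse(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE', kmin = 2, kmax = 3,
#'   ncores_outer = 1, ncores_inner = 1, plot = FALSE, verbose = TRUE)
#' }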
#' @export
valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
  eps=1e-4, kmin=2, kmax=4, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=1,
  size_coll_mod=50, fast=TRUE, verbose=FALSE, plot = TRUE)
{
  p = dim(X)[2]
  m = dim(Y)[2]
  n = dim(X)[1]

  if (verbose)
    print("main loop: over all k and all lambda")

  if (ncores_outer > 1)
  {
    cl = parallel::makeCluster(ncores_outer, outfile='')
    parallel::clusterExport( cl=cl, envir=environment(), varlist=c("X","Y","procedure",
      "selecMod","gamma","mini","maxi","eps","kmin","kmax","rang.min","rang.max",
      "ncores_outer","ncores_inner","verbose","p","m") )
  }

  # Compute models with k components
  computeModels <- function(k)
  {
    if (ncores_outer > 1)
      require("valse") #nodes start with an empty environment

    if (verbose)
      print(paste("Parameters initialization for k =",k))
    #smallEM initializes parameters by k-means and a regression model in each component,
    #doing this 20 times, and keeping the values maximizing the likelihood after 10
    #iterations of the EM algorithm.
    P = initSmallEM(k, X, Y)
    grid_lambda <- computeGridLambda(P$phiInit, P$rhoInit, P$piInit, P$gamInit, X, Y,
      gamma, mini, maxi, eps, fast)
    if (length(grid_lambda) > size_coll_mod)
      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = size_coll_mod)]

    if (verbose)
      print("Compute relevant parameters")
    #select variables for each regularization parameter on the grid:
    #S$selected gives the selected variables
    S = selectVariables(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini, maxi, gamma,
      grid_lambda, X, Y, 1e-8, eps, ncores_inner, fast) #TODO: 1e-8 as arg?! eps?

    if (procedure == 'LassoMLE')
    {
      if (verbose)
        print('run the procedure Lasso-MLE')
      #compute parameter estimates with the maximum likelihood
      #estimator, restricted to the selected variables.
      models <- constructionModelesLassoMLE(P$phiInit, P$rhoInit, P$piInit, P$gamInit,
        mini, maxi, gamma, X, Y, thresh, eps, S, ncores_inner, artefact=1e3, fast, verbose) #TODO: 'thresh' is not defined in this scope
    }
    else
    {
      if (verbose)
        print('run the procedure Lasso-Rank')
      #compute parameter estimates with the low rank
      #estimator, restricted to the selected variables.
      models <- constructionModelesLassoRank(S$Pi, S$Rho, mini, maxi, X, Y, eps, A1,
        rang.min, rang.max, ncores_inner, fast, verbose) #TODO: 'A1' is not defined in this scope
    }
    #caution: some models are NULL after selectVariables
    models = models[sapply(models, function(cell) !is.null(cell))]
    models
  }

  # List (index k) of lists (index lambda) of models
  models_list <-
    if (ncores_outer > 1)
      parallel::parLapply(cl, kmin:kmax, computeModels)
    else
      lapply(kmin:kmax, computeModels)
  if (ncores_outer > 1)
    parallel::stopCluster(cl)

  if (! requireNamespace("capushe", quietly=TRUE))
  {
    warning("'capushe' not available: returning all models")
    return (models_list)
  }

  # Get summary "tableauRecap" from models
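  # capushe::capushe() expects a 4-column data.frame (model name, penalty shape,
  # complexity, contrast); tableauRecap below is built in that format.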
  tableauRecap = do.call( rbind, lapply( seq_along(models_list), function(i) {
    models <- models_list[[i]]
    #For a group of models (same k, different lambda):
    LLH <- sapply( models, function(model) model$llh[1] )
    k = length(models[[1]]$pi)
    # TODO: I'm not at all sure about the following lines...
    # It feels like some information is missing
    ## The main issue is that the penalty is the wrong one: this is the Lasso penalty,
    ## whereas here we want the one from the slope heuristic
    #sumPen = sapply( models, function(model)
    #  sum( model$pi^gamma * sapply(1:k, function(r) sum(abs(model$phi[,,r]))) ) )
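    # As currently written, sumPen is a dimension-like count per model:
    # k * (nrow(rho) + number of nonzero entries of phi[,,1] + 1) - 1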
    sumPen = sapply(models, function(model)
      k*(dim(model$rho)[1]+sum(model$phi[,,1]!=0)+1)-1)
    data.frame(model=paste(i,".",seq_along(models),sep=""),
      pen=sumPen/n, complexity=sumPen, contrast=LLH)
  } ) )

  modSel = capushe::capushe(tableauRecap, n)
  indModSel <-
    if (selecMod == 'DDSE')
      as.numeric(modSel@DDSE@model)
    else if (selecMod == 'Djump')
      as.numeric(modSel@Djump@model)
    else if (selecMod == 'BIC')
      modSel@BIC_capushe$model
    else if (selecMod == 'AIC')
      modSel@AIC_capushe$model

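  # The "model" column was built as "i.j" (i = index in models_list, j = index of lambda);
  # split the selected name to recover both indices.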
  mod = as.character(tableauRecap[indModSel,1])
  listMod = as.integer(unlist(strsplit(mod, "[.]")))
  if (plot) {
    print(plot_valse())
  }
  models_list[[listMod[1]]][[listMod[2]]]
}