X-Git-Url: https://git.auder.net/?p=valse.git;a=blobdiff_plain;f=pkg%2FR%2Fmain.R;h=701a2c93e78262950eec17d3013ee97f2a86ac3d;hp=f0809540b62deb1dec2c7e4570059e3f5a4ec9d9;hb=0e0fb59a6ea0a975d1a9059153aa27f54458bf95;hpb=086ca318ed5580e961ceda3f1e122a2da58e4427

diff --git a/pkg/R/main.R b/pkg/R/main.R
index f080954..701a2c9 100644
--- a/pkg/R/main.R
+++ b/pkg/R/main.R
@@ -14,99 +14,118 @@
 #' @param kmax integer, maximum number of clusters, by default = 4
 #' @param rang.min integer, minimum rank in the low rank procedure, by default = 1
 #' @param rang.max integer, maximum rank in the low rank procedure, by default = 10
+#' @param ncores_outer Number of cores for the outer loop on k
+#' @param ncores_inner Number of cores for the inner loop on lambda
+#' @param size_coll_mod (Maximum) size of a collection of models
+#' @param fast TRUE to use compiled C code, FALSE for R code only
+#' @param verbose TRUE to show some execution traces
+#' @param plot TRUE to plot the selected model once it is estimated
 #'
 #' @return a list with estimators of parameters
 #'
 #' @examples
 #' #TODO: a few examples
 #' @export
-valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 10,
-  maxi = 50,eps = 1e-4,kmin = 2,kmax = 2,
-  rang.min = 1,rang.max = 10)
+valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
+  eps=1e-4, kmin=2, kmax=4, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=1,
+  size_coll_mod=50, fast=TRUE, verbose=FALSE, plot=TRUE)
 {
-  ####################
-  # compute all models
-  ####################
-
   p = dim(X)[2]
   m = dim(Y)[2]
   n = dim(X)[1]
-
-  model = list()
-  tableauRecap = array(0, dim=c(1000,4))
-  cpt = 0
-  print("main loop: over all k and all lambda")
-
-  for (k in kmin:kmax)
+
+  if (verbose)
+    print("main loop: over all k and all lambda")
+
+  if (ncores_outer > 1)
   {
-    print(k)
-    print("Parameters initialization")
-    #smallEM initializes parameters by k-means and regression model in each component,
+    cl = parallel::makeCluster(ncores_outer, outfile='')
+    parallel::clusterExport( cl=cl, envir=environment(), varlist=c("X","Y","procedure",
+      "selecMod","gamma","mini","maxi","eps","kmin","kmax","rang.min","rang.max",
+      "ncores_outer","ncores_inner","verbose","p","m") )
+  }
+
+  # Compute models with k components
+  computeModels <- function(k)
+  {
+    if (ncores_outer > 1)
+      require("valse") #nodes start with an empty environment
+
+    if (verbose)
+      print(paste("Parameters initialization for k =",k))
+    #smallEM initializes parameters by k-means and regression model in each component,
     #doing this 20 times, and keeping the values maximizing the likelihood after 10
     #iterations of the EM algorithm.
-    init = initSmallEM(k, X, Y)
-    phiInit <- init$phiInit
-    rhoInit <- init$rhoInit
-    piInit <- init$piInit
-    gamInit <- init$gamInit
-    grid_lambda <- computeGridLambda(phiInit, rhoInit, piInit, gamInit, X, Y, gamma, mini, maxi, eps)
-
-    if (length(grid_lambda)>100)
-      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 100)]
-    print("Compute relevant parameters")
+    P = initSmallEM(k, X, Y)
+    grid_lambda <- computeGridLambda(P$phiInit, P$rhoInit, P$piInit, P$gamInit, X, Y,
+      gamma, mini, maxi, eps, fast)
+    if (length(grid_lambda)>size_coll_mod)
+      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = size_coll_mod)]
+
+    if (verbose)
+      print("Compute relevant parameters")
     #select variables according to each regularization parameter
-    #from the grid: A1 corresponding to selected variables, and
-    #A2 corresponding to unselected variables.
-
-    params = selectiontotale(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,grid_lambda,X,Y,1e-8,eps)
-    #params2 = selectVariables(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,grid_lambda[seq(1,length(grid_lambda), by=3)],X,Y,1e-8,eps)
-    ## strange: params and params2 are different ...
-    selected <- params$selected
-    Rho <- params$Rho
-    Pi <- params$Pi
+    #from the grid: S$selected corresponding to selected variables
+    S = selectVariables(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini, maxi, gamma,
+      grid_lambda, X, Y, 1e-8, eps, ncores_inner, fast) #TODO: 1e-8 as arg?! eps?

     if (procedure == 'LassoMLE')
     {
-      print('run the procedure Lasso-MLE')
+      if (verbose)
+        print('run the procedure Lasso-MLE')
       #compute parameter estimations, with the Maximum Likelihood
       #Estimator, restricted on selected variables.
-      model[[k]] = constructionModelesLassoMLE(phiInit, rhoInit,piInit,gamInit,mini,maxi,gamma,X,Y,thresh,eps,selected)
-      llh = matrix(ncol = 2)
-      for (l in seq_along(model[[k]]))
-        llh = rbind(llh, model[[k]][[l]]$llh)
-      LLH = llh[-1,1]
-      D = llh[-1,2]
+      models <- constructionModelesLassoMLE(P$phiInit, P$rhoInit, P$piInit, P$gamInit,
+        mini, maxi, gamma, X, Y, 1e-8, eps, S, ncores_inner, artefact=1e3, fast, verbose) #1e-8 replaces an undefined 'thresh'; TODO: expose as arg?
     } else
     {
-      print('run the procedure Lasso-Rank')
+      if (verbose)
+        print('run the procedure Lasso-Rank')
       #compute parameter estimations, with the Low Rank
       #Estimator, restricted on selected variables.
-      model = constructionModelesLassoRank(Pi, Rho, mini, maxi, X, Y, eps,
-        A1, rank.min, rank.max)
-
-      ################################################
-      ### Look at what FOLLOWS
-      phi = runProcedure2()$phi
-      Phi2 = Phi
-      if (dim(Phi2)[1] == 0)
-        Phi[, , 1:k,] <- phi
-      else
-      {
-        Phi <- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
-        Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <<- Phi2
-        Phi[, , 1:k,-(1:(dim(Phi2)[4]))] <<- phi
-      }
+      models <- constructionModelesLassoRank(S$Pi, S$Rho, mini, maxi, X, Y, eps, S$selected,
+        rang.min, rang.max, ncores_inner, fast, verbose) #fixed: 'A1', 'rank.min', 'rank.max' were undefined here
     }
-    tableauRecap[(cpt+1):(cpt+length(model[[k]])), ] = matrix(c(LLH, D, rep(k, length(model[[k]])), 1:length(model[[k]])), ncol = 4)
-    cpt = cpt+length(model[[k]])
+    #Warning: some models are NULL after selectVariables
+    models = models[sapply(models, function(cell) !is.null(cell))]
+    models
   }
-  print('Model selection')
-  tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4])!=0,]
-  tableauRecap = tableauRecap[(tableauRecap[,1])!=Inf,]
-  data = cbind(1:dim(tableauRecap)[1], tableauRecap[,2], tableauRecap[,2], tableauRecap[,1])
-  require(capushe)
-  modSel = capushe(data, n)
+
+  # List (index k) of lists (index lambda) of models
+  models_list <-
+    if (ncores_outer > 1)
+      parallel::parLapply(cl, kmin:kmax, computeModels)
+    else
+      lapply(kmin:kmax, computeModels)
+  if (ncores_outer > 1)
+    parallel::stopCluster(cl)
+
+  if (! requireNamespace("capushe", quietly=TRUE))
+  {
+    warning("'capushe' not available: returning all models")
+    return (models_list)
+  }
+
+  # Get summary "tableauRecap" from models
+  tableauRecap = do.call( rbind, lapply( seq_along(models_list), function(i) {
+    models <- models_list[[i]]
+    #For a group of models (same k, different lambda):
+    LLH <- sapply( models, function(model) model$llh[1] )
+    k = length(models[[1]]$pi)
+    # TODO: not at all sure about the following lines...
+    # It feels like some information is missing
+    ## The main problem is that this is the wrong penalty: it is the Lasso one, while
+    ## we want the slope heuristic one here
+    #sumPen = sapply( models, function(model)
+    #  sum( model$pi^gamma * sapply(1:k, function(r) sum(abs(model$phi[,,r]))) ) )
+    sumPen = sapply(models, function(model)
+      k*(dim(model$rho)[1]+sum(model$phi[,,1]!=0)+1)-1)
+    data.frame(model=paste(i,".",seq_along(models),sep=""),
+      pen=sumPen/n, complexity=sumPen, contrast=LLH)
+  } ) )
+
+  modSel = capushe::capushe(tableauRecap, n)
   indModSel <-
     if (selecMod == 'DDSE')
      as.numeric(modSel@DDSE@model)
@@ -116,5 +135,12 @@ valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 1
       modSel@BIC_capushe$model
     else if (selecMod == 'AIC')
       modSel@AIC_capushe$model
-  model[[tableauRecap[indModSel,3]]][[tableauRecap[indModSel,4]]]
+
+  mod = as.character(tableauRecap[indModSel,1])
+  listMod = as.integer(unlist(strsplit(mod, "[.]")))
+  if (plot) {
+    print(plot_valse())
+  }
+  models_list[[listMod[1]]][[listMod[2]]]
+
 }
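
Usage note (not part of the patch): a minimal sketch of calling the rewritten valse() on simulated data, assuming the patched package is installed and that 'capushe' is available for the model-selection step. The data dimensions and the small kmax/size_coll_mod values are illustrative choices only, not taken from the commit.

  library(valse)
  set.seed(1)
  # Simulated multivariate regression data: n samples, p predictors, m responses
  n = 100; p = 5; m = 3
  X = matrix(rnorm(n*p), nrow=n, ncol=p)
  beta = matrix(rnorm(p*m), nrow=p, ncol=m)
  Y = X %*% beta + matrix(rnorm(n*m, sd=0.5), nrow=n, ncol=m)
  # Sequential Lasso-MLE run; if 'capushe' is missing, the whole collection
  # of models is returned instead of a single selected model
  res = valse(X, Y, procedure='LassoMLE', selecMod='DDSE', kmin=2, kmax=3,
    ncores_outer=1, ncores_inner=1, size_coll_mod=20, verbose=TRUE, plot=FALSE)
  str(res) # list with the estimators of the selected model's parameters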