X-Git-Url: https://git.auder.net/?p=valse.git;a=blobdiff_plain;f=pkg%2FR%2Fmain.R;h=ab25daf5fa210933b5516a0fa875e06b935f2808;hp=7b78a154f72ea653a0254b39e79c32ee621b3603;hb=0eb161e3f3d018bce7d98fc85622d14910f89d43;hpb=4cc632c9a1e1d93e9a43a402d1361f23afc50e5e

diff --git a/pkg/R/main.R b/pkg/R/main.R
index 7b78a15..ab25daf 100644
--- a/pkg/R/main.R
+++ b/pkg/R/main.R
@@ -20,41 +20,39 @@
 #' @examples
 #' #TODO: a few examples
 #' @export
-valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 10,
-  maxi = 50,eps = 1e-4,kmin = 2,kmax = 2,
-  rang.min = 1,rang.max = 10, ncores_k=1, ncores_lambda=3, verbose=FALSE)
+valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
+  eps=1e-4, kmin=2, kmax=2, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=3,
+  verbose=FALSE)
 {
   p = dim(X)[2]
   m = dim(Y)[2]
   n = dim(X)[1]
 
-  tableauRecap = list()
   if (verbose)
     print("main loop: over all k and all lambda")
 
-  if (ncores_k > 1)
+  if (ncores_outer > 1)
   {
-    cl = parallel::makeCluster(ncores_k)
+    cl = parallel::makeCluster(ncores_outer)
     parallel::clusterExport( cl=cl, envir=environment(), varlist=c("X","Y","procedure",
       "selecMod","gamma","mini","maxi","eps","kmin","kmax","rang.min","rang.max",
-      "ncores_k","ncores_lambda","verbose","p","m","k","tableauRecap") )
+      "ncores_outer","ncores_inner","verbose","p","m") )
   }
 
-  # Compute model with k components
-  computeModel <- function(k)
+  # Compute models with k components
+  computeModels <- function(k)
   {
-    if (ncores_k > 1)
+    if (ncores_outer > 1)
       require("valse") #nodes start with an empty environment
 
     if (verbose)
       print(paste("Parameters initialization for k =",k))
-    #smallEM initializes parameters by k-means and regression model in each component,
+    #smallEM initializes parameters by k-means and regression model in each component, 
     #doing this 20 times, and keeping the values maximizing the likelihood after 10
     #iterations of the EM algorithm.
     P = initSmallEM(k, X, Y)
     grid_lambda <- computeGridLambda(P$phiInit, P$rhoInit, P$piInit, P$gamInit, X, Y,
       gamma, mini, maxi, eps)
 
-    # TODO: 100 = magic number
     if (length(grid_lambda)>100)
       grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 100)]
 
@@ -62,10 +60,9 @@ valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 1
     if (verbose)
       print("Compute relevant parameters")
     #select variables according to each regularization parameter
-    #from the grid: A1 corresponding to selected variables, and
-    #A2 corresponding to unselected variables.
-    S = selectVariables(P$phiInit,P$rhoInit,P$piInit,P$gamInit,mini,maxi,gamma,
-      grid_lambda,X,Y,1e-8,eps,ncores_lambda)
+    #from the grid: S$selected corresponding to selected variables
+    S = selectVariables(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini, maxi, gamma,
+      grid_lambda, X, Y, 1e-8, eps, ncores_inner) #TODO: 1e-8 as arg?! eps?
 
     if (procedure == 'LassoMLE')
     {
@@ -73,13 +70,8 @@ valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 1
       if (verbose)
         print('run the procedure Lasso-MLE')
       #compute parameter estimations, with the Maximum Likelihood
       #Estimator, restricted on selected variables.
-      model = constructionModelesLassoMLE(phiInit, rhoInit, piInit, gamInit, mini,
-        maxi, gamma, X, Y, thresh, eps, S$selected)
-      llh = matrix(ncol = 2)
-      for (l in seq_along(model[[k]]))
-        llh = rbind(llh, model[[k]][[l]]$llh)
-      LLH = llh[-1,1]
-      D = llh[-1,2]
+      models <- constructionModelesLassoMLE(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini,
+        maxi, gamma, X, Y, thresh, eps, S$selected, ncores_inner, verbose)
     }
     else
     {
@@ -87,42 +79,41 @@ valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 1
       if (verbose)
         print('run the procedure Lasso-Rank')
       #compute parameter estimations, with the Low Rank
       #Estimator, restricted on selected variables.
-      model = constructionModelesLassoRank(S$Pi, S$Rho, mini, maxi, X, Y, eps, A1,
-        rank.min, rank.max)
-
-      ################################################
-      ### Regarder la SUITE
-      phi = runProcedure2()$phi
-      Phi2 = Phi
-      if (dim(Phi2)[1] == 0)
-        Phi[, , 1:k,] <- phi
-      else
-      {
-        Phi <- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
-        Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <<- Phi2
-        Phi[, , 1:k,-(1:(dim(Phi2)[4]))] <<- phi
-      }
+      models <- constructionModelesLassoRank(S$Pi, S$Rho, mini, maxi, X, Y, eps, S$selected,
+        rang.min, rang.max, ncores_inner, verbose)
     }
-    tableauRecap[[k]] = matrix(c(LLH, D, rep(k, length(model[[k]])), 1:length(model[[k]])), ncol = 4))
+    models
   }
 
-  model <-
-    if (ncores_k > 1)
-    parLapply(cl, kmin:kmax, computeModel)
+  # List (index k) of lists (index lambda) of models
+  models_list <-
+    if (ncores_outer > 1)
+      parallel::parLapply(cl, kmin:kmax, computeModels)
     else
-    lapply(kmin:kmax, computeModel)
-  if (ncores_k > 1)
+      lapply(kmin:kmax, computeModels)
+  if (ncores_outer > 1)
     parallel::stopCluster(cl)
 
+  if (! requireNamespace("capushe", quietly=TRUE))
+  {
+    warning("'capushe' not available: returning all models")
+    return (models_list)
+  }
+
+  # Get summary "tableauRecap" from models:
+  # one row per model, columns = (log-likelihood, dimension, k, lambda index)
+  tableauRecap = do.call( rbind, lapply( seq_along(models_list), function(i) {
+    models = models_list[[i]]
+    llh = do.call(rbind, lapply(models, function(model) model$llh))
+    cbind(llh[,1], llh[,2], rep(kmin+i-1, length(models)), seq_along(models))
+  } ) )
   if (verbose)
     print('Model selection')
-  tableauRecap = do.call( rbind, tableaurecap ) #stack list cells into a matrix
   tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4])!=0,]
-  tableauRecap = tableauRecap[(tableauRecap[,1])!=Inf,]
+  tableauRecap = tableauRecap[!is.infinite(tableauRecap[,1]),]
   data = cbind(1:dim(tableauRecap)[1], tableauRecap[,2], tableauRecap[,2], tableauRecap[,1])
-  require(capushe)
-  modSel = capushe(data, n)
+  modSel = capushe::capushe(data, n)
   indModSel <-
     if (selecMod == 'DDSE')
      as.numeric(modSel@DDSE@model)
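
A possible usage sketch for the new signature (the roxygen block above still says "#TODO: a few examples"). It assumes the valse package and the capushe dependency are installed, and that valse() ultimately returns the capushe-selected model, which is not visible in these hunks; the simulated data are purely illustrative.

library(valse)
set.seed(1)
n = 100; p = 5; m = 3
X = matrix(rnorm(n*p), nrow=n)                        # n observations of p covariates
beta = matrix(rnorm(p*m), nrow=p)                     # one regression matrix
Y = X %*% beta + matrix(rnorm(n*m, sd=0.5), nrow=n)   # toy multivariate responses
res = valse(X, Y, procedure='LassoMLE', selecMod='DDSE', kmin=2, kmax=3,
  ncores_outer=1, ncores_inner=1, verbose=TRUE)

With ncores_outer=1 and ncores_inner=1 everything runs sequentially, so no cluster is created or exported to; larger values exercise the parallel::makeCluster branch shown in the first hunk.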