-constructionModelesLassoMLE = function(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,
- X,Y,seuil,tau,selected, parallel = FALSE)
+#' constructionModelesLassoMLE
+#'
+#' TODO: description
+#'
+#' @param ...
+#'
+#' @return ...
+#'
+#' @export
+constructionModelesLassoMLE = function(phiInit, rhoInit, piInit, gamInit, mini, maxi,
+ gamma, X, Y, seuil, tau, selected, ncores=3, verbose=FALSE)
{
- if (parallel) {
- #TODO: parameter ncores (chaque tâche peut aussi demander du parallélisme...)
- cl = parallel::makeCluster( parallel::detectCores() / 4 )
- parallel::clusterExport(cl=cl,
- varlist=c("phiInit","rhoInit","gamInit","mini","maxi","X","Y","seuil","tau"),
- envir=environment())
- #Pour chaque lambda de la grille, on calcule les coefficients
- out = parLapply( seq_along(glambda), function(lambda)
- {
- n = dim(X)[1]
- p = dim(phiInit)[1]
- m = dim(phiInit)[2]
- k = dim(phiInit)[3]
-
- #TODO: phiInit[selected] et X[selected] sont bien sûr faux; par quoi remplacer ?
- #lambda == 0 c'est normal ? -> ED : oui, ici on calcule le maximum de vraisembance, donc on ne pénalise plus
- res = EMGLLF(phiInit[selected],rhoInit,piInit,gamInit,mini,maxi,gamma,0.,X[selected],Y,tau)
-
- #comment évaluer la dimension à partir du résultat et de [not]selected ?
- #dimension = ...
-
- #on veut calculer la vraisemblance avec toutes nos estimations
- densite = vector("double",n)
- for (r in 1:k)
- {
- delta = Y%*%rho[,,r] - (X[selected]%*%res$phi[selected,,r])
- densite = densite + pi[r] *
- det(rho[,,r])/(sqrt(2*base::pi))^m * exp(-tcrossprod(delta)/2.0)
- }
- llh = c( sum(log(densite[,lambda])), (dimension+m+1)*k-1 )
- list("phi"=res$phi, "rho"=res$rho, "pi"=res$pi, "llh" = llh)
- })
- parallel::stopCluster(cl)
- out
- }
- else {
- #Pour chaque lambda de la grille, on calcule les coefficients
- n = dim(X)[1]
- p = dim(phiInit)[1]
- m = dim(phiInit)[2]
- k = dim(phiInit)[3]
- L = length(selected)
- phi = list()
+ if (ncores > 1)
+ {
+ cl = parallel::makeCluster(ncores)
+ parallel::clusterExport( cl, envir=environment(),
+ varlist=c("phiInit","rhoInit","gamInit","mini","maxi","gamma","X","Y","seuil",
+ "tau","selected","ncores","verbose") )
+ }
+
+ # Individual model computation
+ computeAtLambda <- function(lambda)
+ {
+ if (ncores > 1)
+ require("valse") #// nodes start with an ampty environment
+
+ if (verbose)
+ print(paste("Computations for lambda=",lambda))
+
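+ # sizes: n = observations, p = covariates, m = response dimension, k = mixture components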
+ n = dim(X)[1]
+ p = dim(phiInit)[1]
+ m = dim(phiInit)[2]
+ k = dim(phiInit)[3]
+
+ sel.lambda = selected[[lambda]]
+# col.sel = which(colSums(sel.lambda)!=0) #if boolean matrix
+ col.sel <- which( sapply(sel.lambda,length) > 0 ) #if list of selected vars
+
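+ # no covariate selected at this lambda: nothing to estimate, skip this model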
+ if (length(col.sel) == 0)
+ return (NULL)
+
+ # lambda == 0 because we compute the MLE: no penalization here
+ res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0,
+ X[,col.sel],Y,tau)
+
+ # Eval dimension from the result + selected
+ phiLambda2 = res_EM$phi
+ rhoLambda = res_EM$rho
+ piLambda = res_EM$pi
phiLambda = array(0, dim = c(p,m,k))
- rho = list()
- pi = list()
- llh = list()
-
- out = lapply( seq_along(selected), function(lambda)
- {
- print(lambda)
- sel.lambda = selected[[lambda]]
- col.sel = which(colSums(sel.lambda)!=0)
- if (length(col.sel)>0){
- res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0.,X[,col.sel],Y,tau)
- phiLambda2 = res_EM$phi
- rhoLambda = res_EM$rho
- piLambda = res_EM$pi
- for (j in 1:length(col.sel)){
- phiLambda[col.sel[j],,] = phiLambda2[j,,]
- }
-
- dimension = 0
- for (j in 1:p){
- b = setdiff(1:m, sel.lambda[,j])
- if (length(b) > 0){
- phiLambda[j,b,] = 0.0
- }
- dimension = dimension + sum(sel.lambda[,j]!=0)
- }
-
- #on veut calculer la vraisemblance avec toutes nos estimations
- densite = vector("double",n)
- for (r in 1:k)
- {
- delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
- densite = densite + piLambda[r] *
- det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-tcrossprod(delta)/2.0)
- }
- llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
- list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
- }
- }
- )
- return(out)
- }
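+ # embed the coefficients estimated on the selected columns back into the full p x m x k array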
+ for (j in seq_along(col.sel))
+ phiLambda[col.sel[j],,] = phiLambda2[j,,]
+
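+ # zero out non-selected response coordinates and count the selected coefficients (model dimension)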
+ dimension = 0
+ for (j in 1:p)
+ {
+ b = setdiff(1:m, sel.lambda[[j]])
+ if (length(b) > 0)
+ phiLambda[j,b,] = 0.0
+ dimension = dimension + length(sel.lambda[[j]])
+ }
+
+ # compute the likelihood with all our estimated parameters
+ densite = vector("double",n)
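+ # mixture density at each observation:
+ # densite[i] = sum_r pi_r * det(rho_r) / (2*pi)^(m/2) * exp( -||Y[i,] %*% rho_r - X[i,col.sel] %*% phi_r||^2 / 2 )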
+ for (r in 1:k)
+ {
+ delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
+ densite = densite + piLambda[r] *
+ det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-diag(tcrossprod(delta))/2.0)
+ }
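+ # log-likelihood, stored together with the model dimension (number of free parameters) used later for model selection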
+ llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
+ list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
+ }
+
+ # For each lambda in the grid, compute the coefficients
+ out =
+ if (ncores > 1)
+ parallel::parLapply(cl, seq_along(selected), computeAtLambda)
+ else
+ lapply(seq_along(selected), computeAtLambda)
+
+ if (ncores > 1)
+ parallel::stopCluster(cl)
+
+ out
}
-constructionModelesLassoRank = function(pi,rho,mini,maxi,X,Y,tau,A1,rangmin,rangmax)
+#' constructionModelesLassoRank
+#'
+#' TODO: description
+#'
+#' @param ...
+#'
+#' @return ...
+#'
+#' @export
+constructionModelesLassoRank = function(pi, rho, mini, maxi, X, Y, tau, A1, rangmin,
+ rangmax, ncores, verbose=FALSE)
{
#get matrix sizes
n = dim(X)[1]
# output parameters
phi = array(0, dim=c(p,m,k,L*Size))
llh = matrix(0, L*Size, 2) #log-likelihood
- for(lambdaIndex in 1:L)
+
+ # TODO: parallelize this loop
+ for(lambdaIndex in 1:L)
{
  # keep only the active columns
  # 'active' will be the set of informative variables
#' @examples
#' #TODO: a few examples
#' @export
-valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 10,
- maxi = 50,eps = 1e-4,kmin = 2,kmax = 2,
- rang.min = 1,rang.max = 10, ncores_k=1, ncores_lambda=3, verbose=FALSE)
+valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
+ eps=1e-4, kmin=2, kmax=2, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=3,
+ verbose=FALSE)
{
p = dim(X)[2]
m = dim(Y)[2]
if (verbose)
print("main loop: over all k and all lambda")
- if (ncores_k > 1)
+ if (ncores_outer > 1)
{
- cl = parallel::makeCluster(ncores_k)
+ cl = parallel::makeCluster(ncores_outer)
parallel::clusterExport( cl=cl, envir=environment(), varlist=c("X","Y","procedure",
"selecMod","gamma","mini","maxi","eps","kmin","kmax","rang.min","rang.max",
- "ncores_k","ncores_lambda","verbose","p","m","k","tableauRecap") )
+ "ncores_outer","ncores_inner","verbose","p","m","k","tableauRecap") )
}
# Compute model with k components
computeModel <- function(k)
{
- if (ncores_k > 1)
+ if (ncores_outer > 1)
require("valse") #nodes start with an empty environment
if (verbose)
#from the grid: A1 corresponding to selected variables, and
#A2 corresponding to unselected variables.
S = selectVariables(P$phiInit,P$rhoInit,P$piInit,P$gamInit,mini,maxi,gamma,
- grid_lambda,X,Y,1e-8,eps,ncores_lambda)
+ grid_lambda,X,Y,1e-8,eps,ncores_inner)
if (procedure == 'LassoMLE')
{
#compute parameter estimations, with the Maximum Likelihood
#Estimator, restricted on selected variables.
model = constructionModelesLassoMLE(phiInit, rhoInit, piInit, gamInit, mini,
- maxi, gamma, X, Y, thresh, eps, S$selected)
- llh = matrix(ncol = 2)
- for (l in seq_along(model[[k]]))
- llh = rbind(llh, model[[k]][[l]]$llh)
- LLH = llh[-1,1]
- D = llh[-1,2]
+ maxi, gamma, X, Y, thresh, eps, S$selected, ncores_inner, verbose)
}
else
{
#compute parameter estimations, with the Low Rank
#Estimator, restricted on selected variables.
model = constructionModelesLassoRank(S$Pi, S$Rho, mini, maxi, X, Y, eps, A1,
- rank.min, rank.max)
+ rang.min, rang.max, ncores_inner, verbose)
################################################
  ### TODO: check what follows
- phi = runProcedure2()$phi
- Phi2 = Phi
- if (dim(Phi2)[1] == 0)
- Phi[, , 1:k,] <- phi
- else
- {
- Phi <- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
- Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <<- Phi2
- Phi[, , 1:k,-(1:(dim(Phi2)[4]))] <<- phi
- }
+# phi = runProcedure2()$phi
+# Phi2 = Phi
+# if (dim(Phi2)[1] == 0)
+# Phi[, , 1:k,] <- phi
+# else
+# {
+# Phi <- array(0, dim = c(p, m, kmax, dim(Phi2)[4] + dim(phi)[4]))
+# Phi[, , 1:(dim(Phi2)[3]), 1:(dim(Phi2)[4])] <<- Phi2
+# Phi[, , 1:k,-(1:(dim(Phi2)[4]))] <<- phi
+# }
}
- tableauRecap[[k]] = matrix(c(LLH, D, rep(k, length(model[[k]])), 1:length(model[[k]])), ncol = 4))
+ model
}
- model <-
+ model_list <-
-    if (ncores_k > 1)
+    if (ncores_outer > 1)
      parLapply(cl, kmin:kmax, computeModel)
    else
      lapply(kmin:kmax, computeModel)
-  if (ncores_k > 1)
+  if (ncores_outer > 1)
    parallel::stopCluster(cl)
+ # Get summary "tableauRecap" from models
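+ # (each row: log-likelihood, model dimension, number of clusters k, model index within that k)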
+ tableauRecap = lapply( seq_along(model_list), function(i) {
+ model = model_list[[i]]
+ k = kmin + i - 1
+ llh = matrix(ncol = 2)
+ for (l in seq_along(model))
+ llh = rbind(llh, model[[l]]$llh)
+ LLH = llh[-1,1]
+ D = llh[-1,2]
+ matrix(c(LLH, D, rep(k, length(model)), 1:length(model)), ncol = 4)
+ } )
+
if (verbose)
print('Model selection')
- tableauRecap = do.call( rbind, tableaurecap ) #stack list cells into a matrix
+ tableauRecap = do.call( rbind, tableauRecap ) #stack list cells into a matrix
tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4])!=0,]
tableauRecap = tableauRecap[(tableauRecap[,1])!=Inf,]
data = cbind(1:dim(tableauRecap)[1], tableauRecap[,2], tableauRecap[,2], tableauRecap[,1])