From 086cf723817b690dc368d2f11b7b9e88d183e804 Mon Sep 17 00:00:00 2001
From: emilie <emilie@devijver.org>
Date: Tue, 4 Apr 2017 18:02:05 +0200
Subject: [PATCH] fix some errors

---
 pkg/DESCRIPTION                     |   2 +
 pkg/R/constructionModelesLassoMLE.R | 139 ++++++++++++++--------------
 pkg/R/main.R                        |  23 +++--
 3 files changed, 84 insertions(+), 80 deletions(-)

diff --git a/pkg/DESCRIPTION b/pkg/DESCRIPTION
index eb71b76..b13ee14 100644
--- a/pkg/DESCRIPTION
+++ b/pkg/DESCRIPTION
@@ -38,5 +38,7 @@ Collate:
     'initSmallEM.R'
     'EMGrank.R'
     'EMGLLF.R'
+    'EMGrank_R.R'
+    'EMGLLF_R.R'
     'generateXY.R'
     'A_NAMESPACE.R'
diff --git a/pkg/R/constructionModelesLassoMLE.R b/pkg/R/constructionModelesLassoMLE.R
index 67fc1fc..a49529c 100644
--- a/pkg/R/constructionModelesLassoMLE.R
+++ b/pkg/R/constructionModelesLassoMLE.R
@@ -8,79 +8,82 @@
 #'
 #' export
 constructionModelesLassoMLE = function(phiInit, rhoInit, piInit, gamInit, mini, maxi,
-	gamma, X, Y, seuil, tau, selected, ncores=3, verbose=FALSE)
+                                       gamma, X, Y, thresh, tau, S, ncores=3, artefact = 1e3, verbose=FALSE)
 {
   if (ncores > 1)
-	{
+  {
     cl = parallel::makeCluster(ncores)
     parallel::clusterExport( cl, envir=environment(),
-			varlist=c("phiInit","rhoInit","gamInit","mini","maxi","gamma","X","Y","seuil",
-			"tau","selected","ncores","verbose") )
-	}
-
-	# Individual model computation
-	computeAtLambda <- function(lambda)
-	{
-		if (ncores > 1)
-			require("valse") #// nodes start with an ampty environment
-
+                             varlist=c("phiInit","rhoInit","piInit","gamInit","mini","maxi","gamma","X","Y","thresh",
+                                       "tau","S","ncores","artefact","verbose") )
+  }
+  
+  # Individual model computation
+  computeAtLambda <- function(lambda)
+  {
+    if (ncores > 1)
+      require("valse") #// nodes start with an empty environment
+    
     if (verbose)
-			print(paste("Computations for lambda=",lambda))
-
-		n = dim(X)[1]
-		p = dim(phiInit)[1]
-		m = dim(phiInit)[2]
-		k = dim(phiInit)[3]
-
-		sel.lambda = selected[[lambda]]
-#		col.sel = which(colSums(sel.lambda)!=0) #if boolean matrix
-		col.sel <- which( sapply(sel.lambda,length) > 0 ) #if list of selected vars
-
-		if (length(col.sel) == 0)
-			return (NULL)
-
-		# lambda == 0 because we compute the EMV: no penalization here
-		res = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0,
-			X[,col.sel],Y,tau)
-		
-		# Eval dimension from the result + selected
-		phiLambda2 = res_EM$phi
-		rhoLambda = res_EM$rho
-		piLambda = res_EM$pi
-    phiLambda = array(0, dim = c(p,m,k))
-		for (j in seq_along(col.sel))
-			phiLambda[col.sel[j],,] = phiLambda2[j,,]
-
-		dimension = 0
-		for (j in 1:p)
-		{
-			b = setdiff(1:m, sel.lambda[,j])
-			if (length(b) > 0)
-				phiLambda[j,b,] = 0.0
-			dimension = dimension + sum(sel.lambda[,j]!=0)
-		}
-
-		# on veut calculer la vraisemblance avec toutes nos estimations
-		densite = vector("double",n)
-		for (r in 1:k)
-		{
-			delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
-			densite = densite + piLambda[r] *
-				det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-tcrossprod(delta)/2.0)
-		}
-		llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
-		list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
-	}
-
-	#Pour chaque lambda de la grille, on calcule les coefficients
+      print(paste("Computations for lambda=",lambda))
+    
+    n = dim(X)[1]
+    p = dim(phiInit)[1]
+    m = dim(phiInit)[2]
+    k = dim(phiInit)[3]
+    
+    sel.lambda = S[[lambda]]$selected
+    #		col.sel = which(colSums(sel.lambda)!=0) #if boolean matrix
+    col.sel <- which( sapply(sel.lambda,length) > 0 ) #if list of selected vars
+    
+    if (length(col.sel) == 0)
+    {return (NULL)} else {
+      
+      # lambda == 0 because we compute the MLE: no penalization here
+      res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0,
+                      X[,col.sel],Y,tau)
+      
+      # Eval dimension from the result + selected
+      phiLambda2 = res_EM$phi
+      rhoLambda = res_EM$rho
+      piLambda = res_EM$pi
+      phiLambda = array(0, dim = c(p,m,k))
+      for (j in seq_along(col.sel))
+        phiLambda[col.sel[j],,] = phiLambda2[j,,]
+      
+      dimension = 0
+      for (j in 1:p)
+      {
+        b = setdiff(1:m, sel.lambda[[j]]) ## TODO: check that rows and columns are not mixed up here: is this the right orientation?
+        ## TODO: for the dimension, length(unlist(sel.lambda)) might be enough, but its speed has not been checked
+        if (length(b) > 0)
+          phiLambda[j,b,] = 0.0
+        dimension = dimension + sum(sel.lambda[[j]]!=0)
+      }
+      
+      # Computation of the loglikelihood
+      densite = vector("double",n)
+      for (r in 1:k)
+      {
+        delta = (Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r]))/artefact
+        if (verbose) print(max(delta))
+        densite = densite + piLambda[r] *
+          det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-tcrossprod(delta)/2.0)
+      }
+      llhLambda = c( sum(artefact^2 * log(densite)), (dimension+m+1)*k-1 )
+      list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
+    }
+  }
+  
+  # For each lambda, computation of the parameters
   out =
-		if (ncores > 1)
-			parLapply(cl, glambda, computeAtLambda)
-		else
-			lapply(glambda, computeAtLambda)
-
-	if (ncores > 1)
+    if (ncores > 1)
+      parLapply(cl, 1:length(S), computeAtLambda)
+    else
+      lapply(1:length(S), computeAtLambda)
+  
+  if (ncores > 1)
     parallel::stopCluster(cl)
-
-	out
+  
+  out
 }
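
The hunk above divides the residuals by artefact before exponentiating and then rescales the summed log-density by artefact^2, to keep exp(-tcrossprod(delta)/2) from underflowing when residuals are large. Below is a minimal sketch of an alternative, assuming the intent is one Gaussian density value per observation (so the per-row squared residual rowSums(delta^2) stands in for the n-by-n tcrossprod(delta)) and that det(rhoLambda[,,r]) > 0: compute each cluster's log-density directly on the log scale and combine clusters with log-sum-exp, so no rescaling constant is needed. The helper name loglikMixture and its arguments are illustrative only, not part of the package.

loglikMixture <- function(X, Y, phi, rho, prop, col.sel)
{
  n <- nrow(Y); m <- ncol(Y); k <- length(prop)
  logdens <- matrix(NA_real_, n, k)
  for (r in 1:k)
  {
    # Keep the matrix shape of phi[col.sel,,r] even when a single column is selected
    phir <- matrix(phi[col.sel, , r], nrow = length(col.sel))
    # Residuals for cluster r, one row per observation
    delta <- Y %*% rho[,,r] - X[, col.sel, drop = FALSE] %*% phir
    # log( prop_r * |det(rho_r)| * (2*pi)^(-m/2) * exp(-||delta_i||^2 / 2) )
    logdens[,r] <- log(prop[r]) + as.numeric(determinant(rho[,,r])$modulus) -
      0.5 * m * log(2*base::pi) - 0.5 * rowSums(delta^2)
  }
  # log-sum-exp across clusters, row by row, to avoid underflow
  M <- apply(logdens, 1, max)
  sum(M + log(rowSums(exp(logdens - M))))
}
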
diff --git a/pkg/R/main.R b/pkg/R/main.R
index 8f845f4..2cd345d 100644
--- a/pkg/R/main.R
+++ b/pkg/R/main.R
@@ -21,7 +21,7 @@
 #' #TODO: a few examples
 #' @export
 valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
-	eps=1e-4, kmin=2, kmax=2, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=3,
+	eps=1e-4, kmin=2, kmax=4, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=1, size_coll_mod = 50,
 	verbose=FALSE)
 {
   p = dim(X)[2]
@@ -54,8 +54,8 @@ valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10,
     grid_lambda <- computeGridLambda(P$phiInit, P$rhoInit, P$piInit, P$gamInit, X, Y,
 			gamma, mini, maxi, eps)
 		# TODO: 100 = magic number
-    if (length(grid_lambda)>100)
-      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 100)]
+    if (length(grid_lambda)>size_coll_mod)
+      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = size_coll_mod)]
 
 		if (verbose)
 			print("Compute relevant parameters")
@@ -63,15 +63,15 @@ valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10,
     #from the grid: S$selected corresponding to selected variables
     S = selectVariables(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini, maxi, gamma,
 			grid_lambda, X, Y, 1e-8, eps, ncores_inner) #TODO: 1e-8 as arg?! eps?
-
+    
     if (procedure == 'LassoMLE')
 		{
       if (verbose)
 				print('run the procedure Lasso-MLE')
       #compute parameter estimations, with the Maximum Likelihood
       #Estimator, restricted on selected variables.
-      models <- constructionModelesLassoMLE(phiInit, rhoInit, piInit, gamInit, mini,
-				maxi, gamma, X, Y, thresh, eps, S$selected, ncores_inner, verbose)
+      models <- constructionModelesLassoMLE(P$phiInit, P$rhoInit, P$piInit, P$gamInit, mini,
+				maxi, gamma, X, Y, thresh, eps, S, ncores_inner, artefact = 1e3, verbose)
     }
 		else
 		{
@@ -87,12 +87,10 @@ valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10,
 
 	# List (index k) of lists (index lambda) of models
 	models_list <-
-		#if (ncores_k > 1)
 	  if (ncores_outer > 1)
 			parLapply(cl, kmin:kmax, computeModels)
 		else
 			lapply(kmin:kmax, computeModels)
-	#if (ncores_k > 1)
 	if (ncores_outer > 1)
 		parallel::stopCluster(cl)
 
@@ -103,12 +101,13 @@ valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10,
 	}
 
 	# Get summary "tableauRecap" from models ; TODO: up to line 114, I think this part is wrong :/
-	tableauRecap = t( sapply( models_list, function(models) {
+	tableauRecap = sapply( models_list, function(models) {
 		llh = do.call(rbind, lapply(models, function(model) model$llh))
     LLH = llh[-1,1]
     D = llh[-1,2]
-		c(LLH, D, rep(k, length(model)), 1:length(model))
-	} ))
+		c(LLH, D, rep(k, length(LLH)), 1:length(LLH))
+	}) 
+	tableauRecap = t(tableauRecap)
 	if (verbose)
 		print('Model selection')
   tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4])!=0,]
@@ -125,5 +124,5 @@ valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10,
 			modSel@BIC_capushe$model
 		else if (selecMod == 'AIC')
 			modSel@AIC_capushe$model
-  model[[tableauRecap[indModSel,3]]][[tableauRecap[indModSel,4]]]
+  models_list[[tableauRecap[indModSel,3]]][[tableauRecap[indModSel,4]]]
 }
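
The summary step above (flagged by the TODO) is later indexed as if tableauRecap had one row per fitted model and four columns: log-likelihood, dimension, the number of clusters k, and the lambda index. Below is a minimal sketch of building such a table, assuming computeAtLambda may have returned NULL for some lambda values; buildRecap is an illustrative name, not a function of the package.

buildRecap <- function(models_list, kmin, kmax)
{
  do.call(rbind, lapply(seq_along(models_list), function(i) {
    models <- models_list[[i]]
    kept <- which(!vapply(models, is.null, logical(1)))  # computeAtLambda may return NULL
    if (length(kept) == 0)
      return (NULL)
    llh <- do.call(rbind, lapply(models[kept], function(model) model$llh))
    cbind(LLH = llh[,1], D = llh[,2],
          k = rep((kmin:kmax)[i], length(kept)), lambda = kept)
  }))
}

With this layout, tableauRecap[, 2:4] holds D, k and the lambda index, and column 3 stores the value of k itself, so a by-position lookup into models_list would use tableauRecap[indModSel, 3] - kmin + 1.
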
-- 
2.44.0