#' valse
#'
#' Main entry point: fit a collection of mixture-of-regressions models to
#' (X, Y) with the Lasso-MLE or Lasso-Rank procedure over a grid of cluster
#' counts k and regularization parameters lambda, then select one model via
#' the capushe package.
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param procedure among 'LassoMLE' or 'LassoRank'
#' @param selecMod method to select a model among 'DDSE', 'Djump', 'BIC' or 'AIC'
#' @param gamma integer for the power in the penalty, by default = 1
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 50
#' @param eps real, convergence threshold for the EM algorithm, by default = 1e-4
#' @param kmin integer, minimum number of clusters, by default = 2
#' @param kmax integer, maximum number of clusters, by default = 2
#' @param rang.min integer, minimum rank for the Lasso-Rank procedure, by default = 1
#' @param rang.max integer, maximum rank for the Lasso-Rank procedure, by default = 10
#' @param thresh real, threshold under which coefficients are discarded during
#'   variable selection, by default = 1e-8 (previously a hard-coded constant)
#' @return the selected model: a list with estimators of parameters
#' @export
#-----------------------------------------------------------------------
valse = function(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE', gamma = 1, mini = 10,
                 maxi = 50, eps = 1e-4, kmin = 2, kmax = 2,
                 rang.min = 1, rang.max = 10, thresh = 1e-8) {
  ##################################
  #core workflow: compute all models
  ##################################
  p = dim(X)[2]
  m = dim(Y)[2]
  n = dim(X)[1]

  model = list()
  # Recap table: one row per estimated model,
  # columns = (log-likelihood, dimension, k, index within model[[k]])
  tableauRecap = array(0, dim = c(1000, 4))
  cpt = 0
  print("main loop: over all k and all lambda")

  for (k in kmin:kmax) {
    print(k)
    print("Parameters initialization")
    #smallEM initializes parameters by k-means and regression model in each component,
    #doing this 20 times, and keeping the values maximizing the likelihood after 10
    #iterations of the EM algorithm.
    # NOTE(review): the initialization call was missing in the merged source;
    # initSmallEM(k, X, Y) is assumed from the init$* reads below — confirm.
    init = initSmallEM(k, X, Y)
    phiInit <<- init$phiInit
    rhoInit <<- init$rhoInit
    piInit <<- init$piInit
    gamInit <<- init$gamInit

    grid_lambda <<- gridLambda(phiInit, rhoInit, piInit, gamInit, X, Y, gamma, mini, maxi, eps)
    # Cap the lambda grid at 100 values to keep run time reasonable
    if (length(grid_lambda) > 100) {
      grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 100)]
    }

    print("Compute relevant parameters")
    #select variables according to each regularization parameter
    #from the grid: params$selected corresponding to selected variables
    params = selectiontotale(phiInit, rhoInit, piInit, gamInit, mini, maxi, gamma,
                             grid_lambda, X, Y, thresh, eps)
    selected <<- params$selected
    Rho <<- params$Rho
    Pi <<- params$Pi

    # This guard was deleted by the unresolved merge, leaving a dangling 'else';
    # restored so both procedures populate model[[k]].
    if (procedure == 'LassoMLE') {
      print('run the procedure Lasso-MLE')
      #compute parameter estimations, with the Maximum Likelihood
      #Estimator, restricted on selected variables.
      model[[k]] = constructionModelesLassoMLE(phiInit, rhoInit, piInit, gamInit,
                                               mini, maxi, gamma, X, Y, thresh,
                                               eps, selected)
    } else {
      print('run the procedure Lasso-Rank')
      #compute parameter estimations, with the Low Rank
      #Estimator, restricted on selected variables.
      # NOTE(review): this branch was broken in the merged source; the call
      # below assumes constructionModelesLassoRank returns a list shaped like
      # the Lasso-MLE output (each element carrying $llh) — confirm signature.
      model[[k]] = constructionModelesLassoRank(Pi, Rho, mini, maxi, X, Y, eps,
                                                selected, rang.min, rang.max)
    }

    # Collect (log-likelihood, dimension) pairs for every model estimated at this k;
    # the first all-NA row of the accumulator is dropped below.
    llh = matrix(ncol = 2)
    for (l in seq_along(model[[k]])) {
      llh = rbind(llh, model[[k]][[l]]$llh)
    }
    LLH = llh[-1, 1]
    D = llh[-1, 2]
    tableauRecap[(cpt + 1):(cpt + length(model[[k]])), ] =
      matrix(c(LLH, D, rep(k, length(model[[k]])), 1:length(model[[k]])), ncol = 4)
    cpt = cpt + length(model[[k]])
  }

  print('Model selection')
  # Drop the unused preallocated rows, then rows with infinite log-likelihood
  tableauRecap = tableauRecap[rowSums(tableauRecap[, 2:4]) != 0, ]
  tableauRecap = tableauRecap[(tableauRecap[, 1]) != Inf, ]
  # capushe input matrix: (model label, penalty shape, complexity, contrast).
  # NOTE(review): capushe minimizes the contrast, which is usually -llh;
  # confirm the sign convention of model[[k]][[l]]$llh[1].
  data = cbind(1:dim(tableauRecap)[1], tableauRecap[, 2], tableauRecap[, 2], tableauRecap[, 1])
  require(capushe)
  modSel = capushe(data, n)
  # Map the chosen criterion to the index of the selected model
  indModSel = if (selecMod == 'DDSE') {
    as.numeric(modSel@DDSE@model)
  } else if (selecMod == 'Djump') {
    as.numeric(modSel@Djump@model)
  } else if (selecMod == 'BIC') {
    modSel@BIC_capushe$model
  } else if (selecMod == 'AIC') {
    modSel@AIC_capushe$model
  } else {
    stop("selecMod must be one of 'DDSE', 'Djump', 'BIC', 'AIC'")
  }
  # Columns 3 and 4 of the recap row give (k, index within model[[k]])
  return(model[[tableauRecap[indModSel, 3]]][[tableauRecap[indModSel, 4]]])
}