Adjustments for CRAN upload
[valse.git] / pkg / R / constructionModelesLassoRank.R
#' constructionModelesLassoRank
#'
#' Construct a collection of models with the Lasso-Rank procedure.
#'
#' @param S output of selectVariables.R
#' @param k number of components
#' @param mini integer, minimum number of iterations in the EM algorithm, by default = 10
#' @param maxi integer, maximum number of iterations in the EM algorithm, by default = 100
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param eps real, convergence threshold for the EM algorithm, by default = 1e-4
#' @param rank.min integer, minimum rank in the low-rank procedure, by default = 1
#' @param rank.max integer, maximum rank in the low-rank procedure, by default = 5
#' @param ncores number of cores, by default = 3
#' @param fast TRUE to use compiled C code, FALSE for R code only
#' @param verbose TRUE to show some execution traces
#'
#' @return a list of models, each defined by phi (the reparametrized regression parameter),
#' rho (the reparametrized covariance parameter), pi (the proportion parameter of the mixture model)
#' and llh (the value of the log-likelihood of this estimator on the training dataset). One model is
#' returned for each level of sparsity, given by regularization parameters computed automatically,
#' and for each rank combination (ranks between rank.min and rank.max).
#'
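#' @examples
#' # A minimal usage sketch (not run), assuming 'S' is the output of
#' # selectVariables() computed on the same data X (n*p) and Y (n*m);
#' # the argument values below are illustrative only.
#' \dontrun{
#' models <- constructionModelesLassoRank(S, k = 2, mini = 10, maxi = 100,
#'   X = X, Y = Y, eps = 1e-4, rank.min = 1, rank.max = 5,
#'   ncores = 1, fast = TRUE, verbose = FALSE)
#' }
#'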
#' @export
constructionModelesLassoRank <- function(S, k, mini, maxi, X, Y, eps, rank.min, rank.max,
  ncores, fast, verbose)
{
  n <- nrow(X)
  p <- ncol(X)
  m <- ncol(Y)
  L <- length(S)

  # Possible interesting ranks
  deltaRank <- rank.max - rank.min + 1
  Size <- deltaRank^k
  RankLambda <- matrix(0, nrow = Size * L, ncol = k + 1)
  for (r in 1:k)
  {
    # Build the table of all possible rank combinations together with the lambda
    # indices. In column r, each rank value is repeated deltaRank^(k-r) times in a
    # row, and this pattern is repeated deltaRank^(r-1) times: the first column
    # repeats each value deltaRank^(k-1) times, the last one repeats each value once
    # and cycles deltaRank^(k-1) times. The outer 'each = L' then duplicates every
    # rank combination on L consecutive rows, one per lambda index (last column).
    RankLambda[, r] <- rep(rank.min + rep(0:(deltaRank - 1), times = deltaRank^(r - 1),
      each = deltaRank^(k - r)), each = L)
  }
  RankLambda[, k + 1] <- rep(1:L, times = Size)
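  # For instance (toy illustration), with k = 2, rank.min = 1, rank.max = 2 and
  # L = 2 candidate lambdas, the rows of RankLambda are
  # (1,1,1), (1,1,2), (1,2,1), (1,2,2), (2,1,1), (2,1,2), (2,2,1), (2,2,2):
  # the first k entries give the rank of each component, the last the lambda index.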

  if (ncores > 1)
  {
    cl <- parallel::makeCluster(ncores, outfile = "")
    # Export to the workers the objects that computeAtLambda() needs
    parallel::clusterExport(cl, envir = environment(), varlist = c("RankLambda",
      "Size", "S", "k", "p", "m", "mini", "maxi", "X", "Y", "eps", "fast",
      "ncores", "verbose"))
  }

  computeAtLambda <- function(index)
  {
    lambdaIndex <- RankLambda[index, k + 1]
    rankIndex <- RankLambda[index, 1:k]
    if (ncores > 1)
      require("valse") #workers start with an empty environment

    # 'relevant' is the set of covariates selected for this lambda
    selected <- S[[lambdaIndex]]$selected
    relevant <- c()
    for (j in 1:p)
    {
      if (length(selected[[j]]) > 0)
        relevant <- c(relevant, j)
    }
    # Fit the model only if every requested rank is smaller than the number of
    # relevant covariates (otherwise NULL is returned for this index)
    if (max(rankIndex) < length(relevant))
    {
      phi <- array(0, dim = c(p, m, k))
      if (length(relevant) > 0)
      {
        res <- EMGrank(S[[lambdaIndex]]$Pi, S[[lambdaIndex]]$Rho, mini, maxi,
          X[, relevant], Y, eps, rankIndex, fast)
        # llh = (log-likelihood, number of free parameters of the rank-constrained fit)
        llh <- c(res$LLF, sum(rankIndex * (length(relevant) - rankIndex + m)))
        phi[relevant, , ] <- res$phi
      }
      list(llh = llh, phi = phi, pi = S[[lambdaIndex]]$Pi, rho = S[[lambdaIndex]]$Rho)
    }
  }
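
  # Each element of 'out' below corresponds to one row of RankLambda: element i is
  # the model fitted with ranks RankLambda[i, 1:k] at the lambda of index
  # RankLambda[i, k+1], or NULL if no model was fitted for that combination.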

  # For each (rank combination, lambda) pair in the grid, compute the estimators
  out <-
    if (ncores > 1) {
      parallel::parLapply(cl, seq_len(length(S) * Size), computeAtLambda)
    } else {
      lapply(seq_len(length(S) * Size), computeAtLambda)
    }

  if (ncores > 1)
    parallel::stopCluster(cl)

  out
}