[valse.git] / pkg / R / EMGrank.R
#' EMGrank
#'
#' EM algorithm with a rank constraint on the mean parameter phi (one rank per mixture component).
#'
#' @param Pi Mixture proportions
#' @param Rho Initial renormalized variance parameter
#' @param mini Minimum number of iterations of the EM algorithm
#' @param maxi Maximum number of iterations of the EM algorithm
#' @param X Regressors
#' @param Y Response
#' @param tau Threshold to accept convergence
#' @param rank Vector of possible ranks
#' @param fast If TRUE (default), use the compiled C implementation; otherwise run the plain R version
#'
#' @return A list with
#'   phi: renormalized mean parameter, estimated by the EM algorithm
#'   LLF: log-likelihood associated with this sample, for the estimated parameter values
#'
#' @export
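#'
#' @examples
#' \dontrun{
#' ## Illustrative sketch only, on simulated data (sizes and values are arbitrary
#' ## assumptions, not taken from the package); fast=FALSE runs the plain R version.
#' set.seed(1)
#' n <- 100; p <- 5; m <- 3; k <- 2
#' X <- matrix(rnorm(n * p), n, p)
#' Y <- matrix(rnorm(n * m), n, m)
#' Pi <- rep(1 / k, k)
#' Rho <- array(diag(m), dim = c(m, m, k)) # one m x m renormalization factor per component
#' res <- EMGrank(Pi, Rho, mini = 5, maxi = 50, X, Y, tau = 1e-6,
#'                rank = c(1, 2), fast = FALSE)
#' str(res$phi) # p x m x k array of estimated coefficients
#' res$LLF
#' }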
EMGrank <- function(Pi, Rho, mini, maxi, X, Y, tau, rank, fast=TRUE)
{
  if (!fast)
  {
    # Plain R implementation
    return (.EMGrank_R(Pi, Rho, mini, maxi, X, Y, tau, rank))
  }

  # Compiled C implementation, called through the .Call interface
  n = nrow(X) # number of samples
  p = ncol(X) # number of covariates
  m = ncol(Y) # size of Y (multivariate response)
  k = length(Pi) # number of mixture components
  .Call("EMGrank",
    Pi, Rho, mini, maxi, X, Y, tau, rank,
    phi=double(p*m*k), LLF=double(1),
    n, p, m, k,
    PACKAGE="valse")
}

# helper to always have matrices as arg (TODO: put this elsewhere? improve?)
# --> Yes, we should use by-columns storage everywhere... [later!]
matricize <- function(X)
{
  if (!is.matrix(X))
    return (t(as.matrix(X)))
  return (X)
}
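# Note (illustrative): selecting a single row, e.g. X[i,], drops to a plain numeric
# vector in R; matricize() turns it back into a 1 x p matrix, so the crossprod()
# calls in the M step of .EMGrank_R below always receive matrix arguments.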

# R version - slow but easy to read
.EMGrank_R = function(Pi, Rho, mini, maxi, X, Y, tau, rank)
{
  require(MASS) # for ginv()
  # matrix dimensions
  n = dim(X)[1]
  p = dim(X)[2]
  m = dim(Rho)[2]
  k = dim(Rho)[3]

  # init outputs
  phi = array(0, dim=c(p,m,k))
  Z = rep(1, n) # Z[i] = current component assignment of sample i
  LLF = 0

  # local variables
  Phi = array(0, dim=c(p,m,k))
  deltaPhi = c()
  sumDeltaPhi = 0.
  deltaPhiBufferSize = 20

  # main loop: at least mini iterations, at most maxi; stop early once the recent
  # relative changes of phi (sumDeltaPhi) fall below tau
  ite = 1
  while (ite<=mini || (ite<=maxi && sumDeltaPhi>tau))
  {
    # M step: update for Beta (and then phi)
    for (r in 1:k)
    {
      Z_indice = seq_len(n)[Z==r] # indices where Z == r
      if (length(Z_indice) == 0)
        next
      # U,S,V = SVD of (t(Xr)Xr)^{-1} * t(Xr) * Yr
      s = svd( ginv(crossprod(matricize(X[Z_indice,]))) %*%
               crossprod(matricize(X[Z_indice,]), matricize(Y[Z_indice,])) )
      S = s$d
      # Set all but the rank[r] largest singular values to zero, and recompose:
      # best rank[r] approximation of the initial product
      if (rank[r] < length(S))
        S[(rank[r]+1):length(S)] = 0
      phi[,,r] = s$u %*% diag(S, nrow=length(S)) %*% t(s$v) %*% Rho[,,r] # nrow= guards the length-1 case
    }

    # E step, and computation of the log-likelihood
    sumLogLLF2 = 0
    for (i in seq_len(n))
    {
      sumLLF1 = 0
      maxLogGamIR = -Inf
      for (r in seq_len(k))
      {
        dotProduct = tcrossprod(Y[i,]%*%Rho[,,r] - X[i,]%*%phi[,,r])
        logGamIR = log(Pi[r]) + log(det(Rho[,,r])) - 0.5*dotProduct
        # Z[i] = index of max (gam[i,])
        if (logGamIR > maxLogGamIR)
        {
          Z[i] = r
          maxLogGamIR = logGamIR
        }
        sumLLF1 = sumLLF1 + exp(logGamIR) / (2*pi)^(m/2)
      }
      sumLogLLF2 = sumLogLLF2 + log(sumLLF1)
    }

    LLF = -1/n * sumLogLLF2

    # update distance parameter to check algorithm convergence (delta(phi, Phi)):
    # maximum relative change of the phi entries since the previous iteration
    deltaPhi = c( deltaPhi, max( (abs(phi-Phi)) / (1+abs(phi)) ) )
    if (length(deltaPhi) > deltaPhiBufferSize)
      deltaPhi = deltaPhi[2:length(deltaPhi)]
    sumDeltaPhi = sum(abs(deltaPhi))

    # update other local variables
    Phi = phi
    ite = ite+1
  }
  return(list("phi"=phi, "LLF"=LLF))
}
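
# Illustrative sketch, not part of the package API (the name below is an assumption,
# added for documentation only): zeroing all but the first r singular values of
# B = U diag(d) V' yields the best rank-r approximation of B in Frobenius norm
# (Eckart-Young), which is what the truncation of s$d does in the M step above.
.lowRankApprox_demo <- function(B, r)
{
  s <- svd(B)
  d <- s$d
  if (r < length(d))
    d[(r+1):length(d)] <- 0
  # diag(d, nrow=...) stays correct even if a single singular value remains
  s$u %*% diag(d, nrow=length(d)) %*% t(s$v)
}
# e.g. qr(.lowRankApprox_demo(matrix(rnorm(20), 5, 4), 2))$rank is at most 2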