-vec_bin = function(X,r)
-{
- Z = c()
- indice = c()
- j = 1
- for (i in 1:length(X))
- {
- if(X[i] == r)
- {
- Z[i] = 1
- indice[j] = i
- j=j+1
- } else
- Z[i] = 0
- }
- return (list(Z=Z,indice=indice))
-}
-
+#' Initialization of the EM algorithm
+#'
+#' @param k number of components
+#' @param X matrix of covariates (of size n*p)
+#' @param Y matrix of responses (of size n*m)
+#' @param tau threshold to stop EM algorithm
+#'
+#' @return a list with the initial parameter values phiInit, rhoInit, piInit and gamInit
+#' @export
initSmallEM = function(k,X,Y,tau)
{
n = nrow(Y)
LLFinit1 = list()
require(MASS) #Moore-Penrose generalized inverse of matrix
+ require(mclust) # K-means with selection of K
for(repet in 1:20)
{
- clusters = hclust(dist(y)) #default distance : euclidean
- #cutree retourne les indices (à quel cluster indiv_i appartient) d'un clustering hierarchique
- clusterCut = cutree(clusters,k)
- Zinit1[,repet] = clusterCut
-
+ clusters = Mclust(matrix(c(X,Y),nrow=n),k) #model-based clustering of (X,Y) into k Gaussian components
+ Zinit1[,repet] = clusters$classification
+
for(r in 1:k)
{
Z = Zinit1[,repet]
Z_bin = vec_bin(Z,r)
- Z_vec = Z_bin$Z #vecteur 0 et 1 aux endroits où Z==r
- Z_indice = Z_bin$indice #renvoit les indices où Z==r
-
+ Z_vec = Z_bin$Z #binary vector: 1 at positions where Z==r
+ Z_indice = Z_bin$indice #indices where Z==r
+
betaInit1[,,r,repet] =
ginv(t(x[Z_indice,])%*%x[Z_indice,])%*%t(x[Z_indice,])%*%y[Z_indice,]
sigmaInit1[,,r,repet] = diag(m)
rhoInit1[,,r,repet] = solve(sigmaInit1[,,r,repet])
piInit1[repet,r] = sum(Z_vec)/n
}
-
+
for(i in 1:n)
{
for(r in 1:k)
sumGamI = sum(gam[i,])
gamInit1[i,,repet]= Gam[i,] / sumGamI
}
-
+
miniInit = 10
maxiInit = 11
-
+
new_EMG = .Call("EMGLLF",phiInit1[,,,repet],rhoInit1[,,,repet],piInit1[repet,],
- gamInit1[,,repet],miniInit,maxiInit,1,0,x,y,tau)
+ gamInit1[,,repet],miniInit,maxiInit,1,0,x,y,tau)
LLFEessai = new_EMG$LLF
LLFinit1[repet] = LLFEessai[length(LLFEessai)]
}