out = lapply( seq_along(selected), function(lambda)
{
+ print(lambda)
sel.lambda = selected[[lambda]]
col.sel = which(colSums(sel.lambda)!=0) # indices of covariates selected for this lambda
- res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0.,X[,col.sel],Y,tau)
- phiLambda2 = res_EM$phi
- rhoLambda = res_EM$rho
- piLambda = res_EM$pi
- for (j in 1:length(col.sel)){
- phiLambda[col.sel[j],,] = phiLambda2[j,,]
- }
-
- dimension = 0
- for (j in 1:p){
- b = setdiff(1:m, sel.lambda[,j])
- if (length(b) > 0){
- phiLambda[j,b,] = 0.0
+ if (length(col.sel)>0){ # if no covariate survives, this element stays NULL
+ res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0.,X[,col.sel],Y,tau)
+ phiLambda2 = res_EM$phi
+ rhoLambda = res_EM$rho
+ piLambda = res_EM$pi
+ for (j in 1:length(col.sel)){
+ phiLambda[col.sel[j],,] = phiLambda2[j,,]
}
- dimension = dimension + sum(sel.lambda[,j]!=0)
- }
-
- #compute the likelihood with all the estimated parameters
- densite = vector("double",n)
- for (r in 1:k)
- {
- delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
- densite = densite + piLambda[r] *
- det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-tcrossprod(delta)/2.0)
+
+ dimension = 0
+ for (j in 1:p){
+ b = setdiff(1:m, sel.lambda[,j])
+ if (length(b) > 0){
+ phiLambda[j,b,] = 0.0
+ }
+ dimension = dimension + sum(sel.lambda[,j]!=0)
+ }
+
+ #compute the likelihood with all the estimated parameters
+ densite = vector("double",n)
+ for (r in 1:k)
+ {
+ # rowSums(delta^2): squared residual norm of each observation; densite is a
+ # length-n vector, so the n x n matrix tcrossprod(delta) was the wrong shape
+ delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
+ densite = densite + piLambda[r] *
+ det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-rowSums(delta^2)/2.0)
+ }
+ llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
+ list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
}
- llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
- list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
}
)
return(out)
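
#--- A minimal, self-contained sketch of the density step above (sizes and
#--- names are invented for illustration; only the formula mirrors the code):
demo_densite_step = function() {
  n0 = 5; m0 = 3
  delta0 = matrix(rnorm(n0*m0), n0, m0) # stand-in for Y%*%rho - X%*%phi
  # rowSums(delta0^2) is each observation's squared residual norm, i.e. the
  # diagonal of tcrossprod(delta0) without forming the n0 x n0 matrix
  stopifnot(isTRUE(all.equal(rowSums(delta0^2), diag(tcrossprod(delta0)))))
  exp(-rowSums(delta0^2)/2.0) # per-observation Gaussian kernel as used above
}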
cpt = 0
#For each lambda in the grid, compute the coefficients
for (lambdaIndex in 1:length(glambda)){
+ print(lambdaIndex)
params = EMGLLF(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,glambda[lambdaIndex],X,Y,tau)
p = dim(phiInit)[1]
#' @export
#-----------------------------------------------------------------------
valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 10,
- maxi = 100,eps = 1e-4,kmin = 2,kmax = 3,
+ maxi = 50,eps = 1e-4,kmin = 2,kmax = 3,
rang.min = 1,rang.max = 10) {
##################################
#core workflow: compute all models
source('~/valse/pkg/R/gridLambda.R')
grid_lambda = gridLambda(phiInit, rhoInit, piInit, gamInit, X, Y, gamma, mini, maxi, eps)
+ if (length(grid_lambda)>50){
+ grid_lambda = grid_lambda[round(seq(1, length(grid_lambda), length.out = 50))]
+ }
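+ # e.g. with 120 grid values, round(seq(1, 120, length.out = 50)) keeps 50
+ # roughly evenly spaced indices from 1 to 120 (round() avoids the silent
+ # truncation of fractional indices)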
print("Compute relevant parameters")
#select variables according to each regularization parameter
#from the grid: A1 corresponding to selected variables, and A2 to the discarded ones
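# A hedged usage sketch for valse() (X: n x p predictors, Y: n x m responses,
# both assumed to exist; the argument values are illustrative only):
#   res_valse = valse(X, Y, procedure = 'LassoMLE', selecMod = 'DDSE',
#                     gamma = 1, mini = 10, maxi = 50, eps = 1e-4,
#                     kmin = 2, kmax = 3)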
p = 10
-q = 15
+q = 8
k = 2
D = 20
+### Regression matrices
+model = res_valse # model returned by a valse run (assumed in the workspace)
+K = dim(model$phi)[3]
+valMax = max(abs(model$phi))
+
+library(fields) # provides image.plot
+if (K<4){
+  par(mfrow = c(1,K))
+} else par(mfrow = c(2, ceiling(K/2))) # ceiling() avoids a fractional column count for even K
+
+for (r in 1:K){
+ image.plot(t(abs(model$phi[,,r])),
+ col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
+}
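+# The gray ramp above, gray(rev(seq(0,64,length.out=65))/65), maps entries of
+# |phi| near 0 to near-white and entries near valMax to black; the 66 breaks
+# delimit the 65 color bins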
+
+### Zoom onto two classes we want to compare
+kSel = c(1,2)
+par(mfrow = c(1,3))
+
+for (r in kSel){
+  image.plot(t(abs(model$phi[,,r])), main = expression(hat(beta)), # 'main', not 'title'
+             xaxt="n", yaxt="n",
+             col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
+}
+# |phi_1 - phi_2| can reach 2*valMax, so widen the breaks to cover the full range
+image.plot(t(abs(model$phi[,,kSel[1]]-model$phi[,,kSel[2]])),
+           col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,2*valMax,length.out=66))
+
+### Covariance matrices
+par(mfrow = c(K, 1))
+valMaxRho = max(abs(model$rho)) # scale the breaks to rho, not to phi's valMax
+for (r in 1:K){
+  image.plot(matrix(diag(model$rho[,,r]), ncol = 1),
+             col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMaxRho,length.out=66))
+}
+
+### Proportions
+n = nrow(X) # number of observations (X is assumed to be in the workspace)
+Gam = matrix(0, ncol = K, nrow = n)
+gam = Gam
+for (i in 1:n){
+  for (r in 1:K){
+    sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
+    Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2)* det(model$rho[,,r])
+  }
+  gam[i,] = Gam[i,] / sum(Gam[i,]) # posterior probability of each class
+}
+affec = apply(gam, 1, which.max) # hard assignment: most probable class per observation
+gam2 = matrix(NA, ncol = K, nrow = n)
+for (i in 1:n){
+ gam2[i, affec[i]] = gam[i, affec[i]]
+}
+boxplot(gam2)
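+# A tiny self-contained check of the normalization above (numbers invented;
+# shows that each row of gam sums to 1):
+demo_posterior = function() {
+  Gam0 = matrix(c(0.2, 0.6, 0.1, 0.3), nrow = 2) # unnormalized weights, 2 obs x 2 classes
+  gam0 = Gam0 / rowSums(Gam0) # vectorized form of the row-wise loop above
+  stopifnot(all.equal(rowSums(gam0), c(1, 1)))
+  gam0
+}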
+
+### Mean in each cluster
+XY = cbind(X,Y)
+XY_class = list()
+meanPerClass = matrix(0, ncol = K, nrow = ncol(XY))
+for (r in 1:K){
+  XY_class[[r]] = XY[affec == r, , drop = FALSE] # drop=FALSE keeps matrix shape for singleton clusters
+  meanPerClass[,r] = colMeans(XY_class[[r]])
+}
+
+matplot(meanPerClass, type='l')
\ No newline at end of file