From: emilie
Date: Fri, 17 Mar 2017 12:13:40 +0000 (+0100)
Subject: essaiPlot almost OK: add a color per cluster? For now, it is a script
X-Git-Url: https://git.auder.net/js/img/%7B%7B%20asset%28%27mixstore/current/current/gitweb.css?a=commitdiff_plain;h=64b28e3edeef11b4442b6014ec89246810ebc1cf;p=valse.git

essaiPlot almost OK: add a color per cluster? For now, it is a script
---

diff --git a/pkg/R/constructionModelesLassoMLE.R b/pkg/R/constructionModelesLassoMLE.R
index f86e816..d2bb9a5 100644
--- a/pkg/R/constructionModelesLassoMLE.R
+++ b/pkg/R/constructionModelesLassoMLE.R
@@ -51,35 +51,38 @@ constructionModelesLassoMLE = function(phiInit,rhoInit,piInit,gamInit,mini,maxi,
   out = lapply( seq_along(selected), function(lambda)
   {
+    print(lambda)
     sel.lambda = selected[[lambda]]
     col.sel = which(colSums(sel.lambda)!=0)
-    res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0.,X[,col.sel],Y,tau)
-    phiLambda2 = res_EM$phi
-    rhoLambda = res_EM$rho
-    piLambda = res_EM$pi
-    for (j in 1:length(col.sel)){
-      phiLambda[col.sel[j],,] = phiLambda2[j,,]
-    }
-
-    dimension = 0
-    for (j in 1:p){
-      b = setdiff(1:m, sel.lambda[,j])
-      if (length(b) > 0){
-        phiLambda[j,b,] = 0.0
+    if (length(col.sel)>0){
+      res_EM = EMGLLF(phiInit[col.sel,,],rhoInit,piInit,gamInit,mini,maxi,gamma,0.,X[,col.sel],Y,tau)
+      phiLambda2 = res_EM$phi
+      rhoLambda = res_EM$rho
+      piLambda = res_EM$pi
+      for (j in 1:length(col.sel)){
+        phiLambda[col.sel[j],,] = phiLambda2[j,,]
       }
-      dimension = dimension + sum(sel.lambda[,j]!=0)
-    }
-
-    # we want to compute the likelihood with all our estimates
-    densite = vector("double",n)
-    for (r in 1:k)
-    {
-      delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
-      densite = densite + piLambda[r] *
-        det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-rowSums(delta^2)/2.0)
+
+      dimension = 0
+      for (j in 1:p){
+        b = setdiff(1:m, sel.lambda[,j])
+        if (length(b) > 0){
+          phiLambda[j,b,] = 0.0
+        }
+        dimension = dimension + sum(sel.lambda[,j]!=0)
+      }
+
+      # we want to compute the likelihood with all our estimates
+      densite = vector("double",n)
+      for (r in 1:k)
+      {
+        delta = Y%*%rhoLambda[,,r] - (X[, col.sel]%*%phiLambda[col.sel,,r])
+        densite = densite + piLambda[r] *
+          det(rhoLambda[,,r])/(sqrt(2*base::pi))^m * exp(-rowSums(delta^2)/2.0)
+      }
+      llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
+      list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
     }
-    llhLambda = c( sum(log(densite)), (dimension+m+1)*k-1 )
-    list("phi"= phiLambda, "rho"= rhoLambda, "pi"= piLambda, "llh" = llhLambda)
   } )
   return(out)
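The density accumulation in the hunk above is easier to check in isolation. Below is a minimal standalone sketch of the Gaussian mixture density it computes; the function name mixtureDensity and its signature are hypothetical (not part of the package), and it assumes phi is a p x m x k array, rho an m x m x k array of precision factors, and pi a length-k vector of mixture weights, as in EMGLLF.

    # Sketch only: mirrors the accumulation in constructionModelesLassoMLE.R.
    # densite[i] = sum_r pi[r] * det(rho[,,r]) / (2*pi)^(m/2)
    #              * exp( -||Y[i,] %*% rho[,,r] - X[i,] %*% phi[,,r]||^2 / 2 )
    mixtureDensity = function(X, Y, phi, rho, pi)
    {
      n = nrow(Y); m = ncol(Y); k = dim(phi)[3]
      densite = vector("double", n)
      for (r in 1:k)
      {
        delta = Y %*% rho[,,r] - X %*% phi[,,r]   # n x m matrix of residuals
        # rowSums(delta^2) is the squared norm per observation, so the exp()
        # term is a length-n vector, matching densite
        densite = densite + pi[r] * det(rho[,,r]) / (sqrt(2*base::pi))^m *
          exp(-rowSums(delta^2)/2)
      }
      densite
    }

Called as sum(log(mixtureDensity(X, Y, phi, rho, pi))), this reproduces the first entry of llhLambda; the second entry, (dimension+m+1)*k-1, counts the free parameters of the model for later selection.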
diff --git a/pkg/R/selectiontotale.R b/pkg/R/selectiontotale.R
index 25128cb..7209fed 100644
--- a/pkg/R/selectiontotale.R
+++ b/pkg/R/selectiontotale.R
@@ -32,6 +32,7 @@ selectiontotale = function(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,glambd
   cpt = 0
   # for each lambda on the grid, compute the coefficients
   for (lambdaIndex in 1:length(glambda)){
+    print(lambdaIndex)
     params = EMGLLF(phiInit,rhoInit,piInit,gamInit,mini,maxi,gamma,glambda[lambdaIndex],X,Y,tau)
     p = dim(phiInit)[1]
diff --git a/pkg/R/valse.R b/pkg/R/valse.R
index 779f61a..396a9a1 100644
--- a/pkg/R/valse.R
+++ b/pkg/R/valse.R
@@ -16,7 +16,7 @@
 #' @export
 #-----------------------------------------------------------------------
 valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 10,
-                 maxi = 100,eps = 1e-4,kmin = 2,kmax = 3,
+                 maxi = 50,eps = 1e-4,kmin = 2,kmax = 3,
                  rang.min = 1,rang.max = 10) {
   ##################################
   #core workflow: compute all models
@@ -45,6 +45,9 @@ valse = function(X,Y,procedure = 'LassoMLE',selecMod = 'DDSE',gamma = 1,mini = 1
   source('~/valse/pkg/R/gridLambda.R')
   grid_lambda <<- gridLambda(phiInit, rhoInit, piInit, gamInit, X, Y, gamma, mini, maxi, eps)
+  if (length(grid_lambda)>50){
+    grid_lambda = grid_lambda[seq(1, length(grid_lambda), length.out = 50)]
+  }
   print("Compute relevant parameters")
   #select variables according to each regularization parameter
   #from the grid: A1 corresponding to selected variables, and
diff --git a/reports/essai16mars.R b/reports/essai16mars.R
index 416f895..88659ab 100644
--- a/reports/essai16mars.R
+++ b/reports/essai16mars.R
@@ -1,5 +1,5 @@
 p = 10
-q = 15
+q = 8
 k = 2
 D = 20
diff --git a/reports/essaiPlot.R b/reports/essaiPlot.R
index e69de29..dea7e00 100644
--- a/reports/essaiPlot.R
+++ b/reports/essaiPlot.R
@@ -0,0 +1,60 @@
+### Regression matrices
+model = res_valse
+K = dim(model$phi)[3]
+valMax = max(abs(model$phi))
+
+require(fields)
+if (K<4){
+  par(mfrow = c(1,K))
+} else par(mfrow = c(2, ceiling(K/2)))
+
+for (r in 1:K){
+  image.plot(t(abs(model$phi[,,r])),
+             col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
+}
+
+### Zoom onto two classes we want to compare
+kSel = c(1,2)
+par(mfrow = c(1,3))
+
+for (r in kSel){
+  image.plot(t(abs(model$phi[,,r])),main="hat{beta}",xaxt="n",yaxt="n",
+             col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
+}
+image.plot(t(abs(model$phi[,,kSel[1]]-model$phi[,,kSel[2]])),
+           col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
+
+### Covariance matrices
+par(mfrow = c(K, 1))
+for (r in 1:K){
+  image.plot(matrix(diag(model$rho[,,r]), ncol = 1),
+             col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
+}
+
+### Proportions
+Gam = matrix(0, ncol = K, nrow = n)
+gam = Gam
+for (i in 1:n){
+  for (r in 1:K){
+    sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
+    Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2) * det(model$rho[,,r])
+  }
+  gam[i,] = Gam[i,] / sum(Gam[i,])
+}
+affec = apply(gam, 1, which.max)
+gam2 = matrix(NA, ncol = K, nrow = n)
+for (i in 1:n){
+  gam2[i, affec[i]] = gam[i, affec[i]]
+}
+boxplot(gam2)
+
+### Mean in each cluster
+XY = cbind(X,Y)
+XY_class = list()
+meanPerClass = matrix(0, ncol = K, nrow = dim(XY)[2])
+for (r in 1:K){
+  XY_class[[r]] = XY[affec == r, ]
+  meanPerClass[,r] = apply(XY_class[[r]], 2, mean)
+}
+
+matplot(meanPerClass, type='l')
\ No newline at end of file
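On the open question in the commit message ("add a color per cluster?"), here is a minimal sketch of one way to do it, assuming the objects K, gam2, and meanPerClass created by essaiPlot.R are still in the workspace; the rainbow(K) palette is an assumption, not part of the commit.

    # Sketch only: one color per cluster, reusing essaiPlot.R objects.
    cols = rainbow(K)                      # hypothetical palette choice
    matplot(meanPerClass, type = 'l', lty = 1, col = cols,
            xlab = "variable index", ylab = "mean per cluster")
    legend("topright", legend = paste("cluster", 1:K), col = cols, lty = 1)
    boxplot(gam2, border = cols,           # same colors on the proportion boxplots
            main = "posterior probabilities per cluster")

Reusing one palette for both plots lets the mean curves and the proportion boxplots be matched by eye across figures.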