From: emilie
Date: Tue, 11 Apr 2017 16:36:21 +0000 (+0200)
Subject: Update plot_valse and add it to main.R
X-Git-Url: https://git.auder.net/variants/current/doc/css/%7B%7B%20asset%28%27mixstore/%3C?a=commitdiff_plain;h=4c9cc558a39c034ed75d0d5531fa0ce29d8561fc;p=valse.git

Update plot_valse and add it to main.R
---

diff --git a/pkg/R/main.R b/pkg/R/main.R
index 6ff15b3..2ae01e6 100644
--- a/pkg/R/main.R
+++ b/pkg/R/main.R
@@ -27,7 +27,7 @@
 #' @export
 valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
   eps=1e-4, kmin=2, kmax=4, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=1,
-  size_coll_mod=50, fast=TRUE, verbose=FALSE)
+  size_coll_mod=50, fast=TRUE, verbose=FALSE, plot = TRUE)
 {
   p = dim(X)[2]
   m = dim(Y)[2]
@@ -137,6 +137,9 @@
   print(tableauRecap)
   mod = as.character(tableauRecap[indModSel,1])
   listMod = as.integer(unlist(strsplit(mod, "[.]")))
+  if (plot){
+    print(plot_valse())
+  }
   models_list[[listMod[1]]][[listMod[2]]]
-  models_list
+}
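With the new plot argument, valse() now calls plot_valse() after model selection when plot = TRUE (the default). A minimal usage sketch, assuming the package is installed; the data X and Y below are simulated placeholders and are not part of this patch:

n = 100; p = 5; m = 3                                              # illustrative sizes only
X = matrix(rnorm(n * p), n, p)                                     # n x p covariates
Y = X %*% matrix(rnorm(p * m), p, m) + matrix(rnorm(n * m), n, m)  # n x m responses
res = valse(X, Y, kmin = 2, kmax = 3, verbose = TRUE, plot = TRUE) # plots drawn before returning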
diff --git a/pkg/R/plot.R b/pkg/R/plot.R
index a8da583..7fdaa71 100644
--- a/pkg/R/plot.R
+++ b/pkg/R/plot.R
@@ -1 +1,78 @@
-#TODO: reuse Emilie's plots from reports/...
+#' Plot
+#'
+#' A function that plots the relevant estimated parameters
+#'
+#'
+#' @return several plots
+#'
+#' @examples TODO
+#'
+#' @export
+#'
+plot_valse = function(){
+  require("gridExtra")
+  require("ggplot2")
+  require("reshape2")
+
+  ## regression matrices
+  gReg = list()
+  for (r in 1:K){
+    Melt = melt(t((model$phi[,,r])))
+    gReg[[r]] = ggplot(data = Melt, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+      scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+      ggtitle(paste("Regression matrices in cluster",r))
+  }
+  gReg
+
+  ## Differences between two clusters
+  k1 = 1
+  k2 = 2
+  Melt = melt(t(model$phi[,,k1]-model$phi[,,k2]))
+  gDiff = ggplot(data = Melt, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+    scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+    ggtitle(paste("Difference between regression matrices in cluster",k1, "and", k2))
+  gDiff
+
+  ### Covariance matrices
+  matCov = matrix(NA, nrow = dim(model$rho[,,1])[1], ncol = K)
+  for (r in 1:K){
+    matCov[,r] = diag(model$rho[,,r])
+  }
+  MeltCov = melt(matCov)
+  gCov = ggplot(data = MeltCov, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+    scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+    ggtitle(paste("Regression matrices in cluster",r))
+  gCov
+
+  ### proportions
+  Gam = matrix(0, ncol = K, nrow = n)
+  gam = Gam
+  for (i in 1:n){
+    for (r in 1:K){
+      sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
+      Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2)* det(model$rho[,,r])
+    }
+    gam[i,] = Gam[i,] / sum(Gam[i,])
+  }
+  affec = apply(gam, 1, which.max)
+  gam2 = matrix(NA, ncol = K, nrow = n)
+  for (i in 1:n){
+    gam2[i, ] = c(gam[i, affec[i]], affec[i])
+  }
+  bp <- ggplot(data.frame(gam2), aes(x=X2, y=X1, color=X2, group = X2)) +
+    geom_boxplot() + theme(legend.position = "none")
+  bp + background_grid(major = "xy", minor = "none")
+
+  ### Mean in each cluster
+  XY = cbind(X,Y)
+  XY_class = list()
+  meanPerClass = matrix(0, ncol = K, nrow = dim(XY)[2])
+  for (r in 1:K){
+    XY_class[[r]] = XY[affec == r, ]
+    meanPerClass[,r] = apply(XY_class[[r]], 2, mean)
+  }
+  data = data.frame(mean = as.vector(meanPerClass), cluster = as.character(rep(1:K, each = dim(XY)[2])), time = rep(1:dim(XY)[2],K))
+  g = ggplot(data, aes(x=time, y = mean, group = cluster, color = cluster))
+  g + geom_line(aes(linetype=cluster, color=cluster)) + geom_point(aes(color=cluster))
+
+}
\ No newline at end of file
diff --git a/pkg/R/plot_valse.R b/pkg/R/plot_valse.R
new file mode 100644
index 0000000..05963c8
--- /dev/null
+++ b/pkg/R/plot_valse.R
@@ -0,0 +1,78 @@
+#' Plot
+#'
+#' A function that plots the relevant estimated parameters
+#'
+#'
+#' @return several plots
+#'
+#' @examples TODO
+#'
+#' @export
+#'
+plot_valse = function(){
+  require("gridExtra")
+  require("ggplot2")
+  require("reshape2")
+
+  ## regression matrices
+  gReg = list()
+  for (r in 1:K){
+    Melt = melt(t((model$phi[,,r])))
+    gReg[[r]] = ggplot(data = Melt, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+      scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+      ggtitle(paste("Regression matrices in cluster",r))
+  }
+  print(gReg)
+
+  ## Differences between two clusters
+  k1 = 1
+  k2 = 2
+  Melt = melt(t(model$phi[,,k1]-model$phi[,,k2]))
+  gDiff = ggplot(data = Melt, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+    scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+    ggtitle(paste("Difference between regression matrices in cluster",k1, "and", k2))
+  print(gDiff)
+
+  ### Covariance matrices
+  matCov = matrix(NA, nrow = dim(model$rho[,,1])[1], ncol = K)
+  for (r in 1:K){
+    matCov[,r] = diag(model$rho[,,r])
+  }
+  MeltCov = melt(matCov)
+  gCov = ggplot(data = MeltCov, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+    scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+    ggtitle("Covariance matrices")
+  print(gCov)
+
+  ### proportions
+  Gam = matrix(0, ncol = K, nrow = n)
+  gam = Gam
+  for (i in 1:n){
+    for (r in 1:K){
+      sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
+      Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2)* det(model$rho[,,r])
+    }
+    gam[i,] = Gam[i,] / sum(Gam[i,])
+  }
+  affec = apply(gam, 1, which.max)
+  gam2 = matrix(NA, ncol = K, nrow = n)
+  for (i in 1:n){
+    gam2[i, ] = c(gam[i, affec[i]], affec[i])
+  }
+  bp <- ggplot(data.frame(gam2), aes(x=X2, y=X1, color=X2, group = X2)) +
+    geom_boxplot() + theme(legend.position = "none")
+  print(bp + background_grid(major = "xy", minor = "none"))
+
+  ### Mean in each cluster
+  XY = cbind(X,Y)
+  XY_class = list()
+  meanPerClass = matrix(0, ncol = K, nrow = dim(XY)[2])
+  for (r in 1:K){
+    XY_class[[r]] = XY[affec == r, ]
+    meanPerClass[,r] = apply(XY_class[[r]], 2, mean)
+  }
+  data = data.frame(mean = as.vector(meanPerClass), cluster = as.character(rep(1:K, each = dim(XY)[2])), time = rep(1:dim(XY)[2],K))
+  g = ggplot(data, aes(x=time, y = mean, group = cluster, color = cluster))
+  print(g + geom_line(aes(linetype=cluster, color=cluster)) + geom_point(aes(color=cluster)) + ggtitle('Mean per cluster'))
+
+}
\ No newline at end of file
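Note that plot_valse(), as defined above, takes no arguments: it expects model (a list with components phi, rho and pi), K, n, X and Y to already be defined in the user's workspace, and the boxplot of the proportions also calls background_grid(), which comes from the cowplot package and is not among the three require() calls. A rough sketch of the objects it looks up, with purely illustrative sizes and contents (nothing below is part of the patch):

library(cowplot)                              # assumed here, for background_grid()

n = 50; p = 4; m = 3; K = 2                   # illustrative dimensions only
X = matrix(rnorm(n * p), n, p)                # n x p covariates
Y = matrix(rnorm(n * m), n, m)                # n x m responses
model = list(
  phi = array(rnorm(p * m * K), c(p, m, K)),  # one p x m regression matrix per cluster
  rho = array(diag(m), c(m, m, K)),           # one m x m matrix per cluster (identity here)
  pi  = rep(1 / K, K))                        # mixing proportions
plot_valse()                                  # draws the plots defined in pkg/R/plot_valse.R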
diff --git a/reports/essaiPlot.R b/reports/essaiPlot.R
index b000987..10b0e01 100644
--- a/reports/essaiPlot.R
+++ b/reports/essaiPlot.R
@@ -1,17 +1,30 @@
 ### Regression matrices
-model = res_valse
+model = Res
 K = dim(model$phi)[3]
 valMax = max(abs(model$phi))
 require(fields)
+
 if (K<4){
   par(mfrow = c(1,K))
-} else par(mfrow = c(2, (K+1)/2))
+} else op = par(mfrow = c(2, (K+1)/2))
+
+## Phi
 for (r in 1:K){
-  image.plot(t(abs(model$phi[,,r])),
+  image.plot(t(abs(model$phi[,,r])),
     col=gray(rev(seq(0,64,length.out=65))/65),breaks=seq(0,valMax,length.out=66))
 }
+par(mfrow = c(1,K),oma = c(0,0,3,0))
+mtext("Regression matrices in each cluster", side=3, line=4, font=2, cex=2,
+  col='red')
+
+par(mfrow = c(1,2), oma=c(0,0,3,0))
+for (i in 1:4)
+  plot(runif(20), runif(20),
+    main=paste("random plot (",i,")",sep=''))
+par(op)
+mtext("Four plots",
+  side=3, line=4, font=2, cex=2, col='red')

 ### Zoom onto two classes we want to compare
 kSel = c(1,2)
@@ -35,7 +48,7 @@ for (r in 1:K){
 Gam = matrix(0, ncol = K, nrow = n)
 gam = Gam
 for (i in 1:n){
-  for (r in 1:k){
+  for (r in 1:K){
     sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
     Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2)* det(model$rho[,,r])
   }
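For reference, the weights computed in this last hunk (the same loop appears in plot_valse) read as the usual posterior membership probabilities of a mixture of Gaussian regressions: up to the constant factor (2*pi)^(-m/2), which cancels in the normalisation,

  Gam[i,r] = pi[r] * det(rho[,,r]) * exp( -0.5 * || Y[i,] %*% rho[,,r] - X[i,] %*% phi[,,r] ||^2 )
  gam[i,]  = Gam[i,] / sum(Gam[i,])

so taking which.max along the rows of gam assigns each observation to the cluster with the largest posterior weight.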