#' @export
valse = function(X, Y, procedure='LassoMLE', selecMod='DDSE', gamma=1, mini=10, maxi=50,
eps=1e-4, kmin=2, kmax=4, rang.min=1, rang.max=10, ncores_outer=1, ncores_inner=1,
- size_coll_mod=50, fast=TRUE, verbose=FALSE)
+ size_coll_mod=50, fast=TRUE, verbose=FALSE, plot=TRUE)
{
p = dim(X)[2]
m = dim(Y)[2]
mod = as.character(tableauRecap[indModSel,1])
listMod = as.integer(unlist(strsplit(mod, "[.]")))
+ if (plot){
+   plot_valse(X, Y, models_list[[listMod[1]]][[listMod[2]]])
+ }
models_list[[listMod[1]]][[listMod[2]]]
- models_list
+
}
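+# Illustrative usage (not run): 'X.sim' and 'Y.sim' are placeholder data
+# matrices, not objects provided by the package. With plot=TRUE the selected
+# model is plotted via plot_valse() before being returned.
+#   res = valse(X.sim, Y.sim, kmin=2, kmax=3, plot=TRUE)
+#   res$phi  # regression matrices of the selected model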
-#TODO: adapt Emilie's plots from reports/...
+#' Plot
+#'
+#' Plot the parameters of a model estimated by \code{valse}: the regression
+#' matrix of each cluster, the difference between the first two clusters, the
+#' diagonal of rho in each cluster, boxplots of the posterior membership
+#' probabilities, and the componentwise mean of (X,Y) in each cluster.
+#'
+#' @param X matrix of covariates (of size n x p)
+#' @param Y matrix of responses (of size n x m)
+#' @param model the estimated model, as returned by \code{valse} (with entries phi, rho and pi)
+#'
+#' @return a series of plots, printed on the current graphical device
+#'
+#' @examples
+#' #TODO: add a reproducible example
+#'
+#' @export
+#'
+plot_valse = function(X, Y, model){
+  require("ggplot2")
+  require("reshape2")
+  require("gridExtra")
+  require("cowplot") # provides background_grid()
+
+  n = nrow(X)
+  K = dim(model$phi)[3] # number of clusters in the selected model
+
+ ## regression matrices
+ gReg = list()
+ for (r in 1:K){
+ Melt = melt(t((model$phi[,,r])))
+ gReg[[r]] = ggplot(data = Melt, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+ scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+ ggtitle(paste("Regression matrices in cluster",r))
+ }
+ grid.arrange(grobs = gReg, ncol = min(K, 3)) # all clusters on a single grid
+
+ ## Differences between two clusters
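+ # clusters to compare; change k1 and k2 to inspect another pair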
+ k1 = 1
+ k2 = 2
+ Melt = melt(t(model$phi[,,k1]-model$phi[,,k2]))
+ gDiff = ggplot(data = Melt, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+ scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+ ggtitle(paste("Difference between regression matrices in cluster",k1, "and", k2))
+ print(gDiff)
+
+ ### Covariance matrices
+ matCov = matrix(NA, nrow = dim(model$rho[,,1])[1], ncol = K)
+ for (r in 1:K){
+ matCov[,r] = diag(model$rho[,,r])
+ }
+ MeltCov = melt(matCov)
+ gCov = ggplot(data = MeltCov, aes(x=Var1, y=Var2, fill=value)) + geom_tile() +
+   scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0, space = "Lab") +
+   ggtitle("Covariance matrices (diagonal terms, one column per cluster)")
+ print(gCov)
+
+ ### proportions
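+ # Gam[i,r] is proportional to pi_r * det(rho_r) * exp(-||Y_i rho_r - X_i phi_r||^2 / 2),
+ # the unnormalised posterior probability that observation i belongs to cluster r;
+ # gam stores the normalised probabilities.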
+ Gam = matrix(0, ncol = K, nrow = n)
+ gam = Gam
+ for (i in 1:n){
+ for (r in 1:K){
+ sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
+ Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2)* det(model$rho[,,r])
+ }
+ gam[i,] = Gam[i,] / sum(Gam[i,])
+ }
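+ # hard assignment: each observation goes to its most probable cluster; gam2 keeps
+ # that maximal posterior probability together with the cluster label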
+ affec = apply(gam, 1,which.max)
+ gam2 = matrix(NA, ncol = K, nrow = n)
+ for (i in 1:n){
+ gam2[i, ] = c(gam[i, affec[i]], affec[i])
+ }
+ bp <- ggplot(data.frame(gam2), aes(x=X2, y=X1, color=X2, group = X2)) +
+   geom_boxplot() + theme(legend.position = "none") +
+   ggtitle("Posterior probabilities in each cluster")
+ print(bp + background_grid(major = "xy", minor = "none"))
+
+ ### Mean in each cluster
+ XY = cbind(X,Y)
+ XY_class = list()
+ meanPerClass = matrix(0, ncol = K, nrow = dim(XY)[2])
+ for (r in 1:K){
+   XY_class[[r]] = XY[affec == r, , drop = FALSE]
+   meanPerClass[,r] = apply(XY_class[[r]], 2, mean)
+ }
+ data = data.frame(mean = as.vector(meanPerClass),
+                   cluster = as.character(rep(1:K, each = dim(XY)[2])),
+                   time = rep(1:dim(XY)[2], K))
+ g = ggplot(data, aes(x=time, y = mean, group = cluster, color = cluster))
+ print(g + geom_line(aes(linetype=cluster, color=cluster))+ geom_point(aes(color=cluster)) + ggtitle('Mean per cluster'))
+
+}
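+
+# Illustrative call (assumes 'res' holds the model returned by valse() and that
+# X and Y are the data it was fitted on):
+#   plot_valse(X, Y, res)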
### Regression matrices
-model = res_valse
+model = Res
K = dim(model$phi)[3]
valMax = max(abs(model$phi))
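+# common grey scale across clusters, bounded by the largest |phi| coefficient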
require(fields)
+
+# one panel per cluster, with an outer top margin reserved for a global title
+op = par(mfrow = if (K < 4) c(1, K) else c(2, ceiling(K/2)), oma = c(0, 0, 3, 0))
+
+## Phi
for (r in 1:K){
  image.plot(t(abs(model$phi[,,r])),
    col=gray(rev(seq(0,64,length.out=65))/65), breaks=seq(0,valMax,length.out=66))
}
+mtext("Regression matrices in each cluster", side=3, line=1, outer=TRUE,
+      font=2, cex=2, col='red')
+par(op)
### Zoom onto two classes we want to compare
kSel = c(1,2)
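+# posterior membership probabilities, computed as in plot_valse()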
Gam = matrix(0, ncol = K, nrow = n)
gam = Gam
for (i in 1:n){
- for (r in 1:k){
+ for (r in 1:K){
sqNorm2 = sum( (Y[i,]%*%model$rho[,,r]-X[i,]%*%model$phi[,,r])^2 )
Gam[i,r] = model$pi[r] * exp(-0.5*sqNorm2)* det(model$rho[,,r])
}