#' Plot
#'
#' A function which plots the relevant parameters of a model estimated by valse
#'
#' @param X matrix of covariates (of size n*p)
#' @param Y matrix of responses (of size n*m)
#' @param model the model constructed by the valse procedure
#' @param n sample size
#' @param comp TRUE to also plot the difference between the regression matrices of two clusters
#' @param k1 index of the first cluster to compare (required when comp is TRUE)
#' @param k2 index of the second cluster to compare (required when comp is TRUE)
#' @return several plots
#'
#' @examples
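#' \dontrun{
#' ## Minimal sketch, not taken from the package: a hand-built 'model' list with
#' ## the fields used by this function (pi, phi, rho, proba, affec); in real use,
#' ## 'model' is the output of the valse estimation procedure.
#' n <- 50; p <- 2; m <- 2; K <- 2
#' X <- matrix(rnorm(n * p), n, p)
#' Y <- matrix(rnorm(n * m), n, m)
#' model <- list(
#'   pi = rep(1 / K, K),
#'   phi = array(rnorm(p * m * K), dim = c(p, m, K)),
#'   rho = array(diag(m), dim = c(m, m, K)),
#'   proba = matrix(runif(n * K), n, K),
#'   affec = sample(1:K, n, replace = TRUE))
#' plot_valse(X, Y, model, n)
#' }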
#'
#' @export
#'
plot_valse <- function(X, Y, model, n, comp = FALSE, k1 = NA, k2 = NA)
{
  require("gridExtra")
  require("ggplot2")
  require("reshape2")
  require("cowplot")

  K <- length(model$pi)
  ## regression matrices
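  ## one heatmap (geom_tile) of the regression matrix phi[, , r] per cluster,
  ## on a diverging blue/white/red scale centered at 0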
  gReg <- list()
  for (r in 1:K)
  {
    Melt <- melt(t((model$phi[, , r])))
    gReg[[r]] <- ggplot(data = Melt, aes(x = Var1, y = Var2, fill = value)) +
      geom_tile() + scale_fill_gradient2(low = "blue", high = "red", mid = "white",
      midpoint = 0, space = "Lab") + ggtitle(paste("Regression matrices in cluster",
      r))
  }
  print(gReg)

  ## Differences between two clusters
  if (comp)
  {
    if (is.na(k1) || is.na(k2))
    {
      stop("k1 and k2 must be integers, representing the clusters you want to compare")
    }
    Melt <- melt(t(model$phi[, , k1] - model$phi[, , k2]))
    gDiff <- ggplot(data = Melt, aes(x = Var1, y = Var2, fill = value)) + geom_tile() +
      scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0,
      space = "Lab") + ggtitle(paste("Difference between regression matrices in cluster",
      k1, "and", k2))
    print(gDiff)
  }

  ### Covariance matrices
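  ## column r holds the diagonal of rho[, , r]; the K columns are then shown in one heatmap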
  matCov <- matrix(NA, nrow = dim(model$rho[, , 1])[1], ncol = K)
  for (r in 1:K)
  {
    matCov[, r] <- diag(model$rho[, , r])
  }
  MeltCov <- melt(matCov)
  gCov <- ggplot(data = MeltCov, aes(x = Var1, y = Var2, fill = value)) + geom_tile() +
    scale_fill_gradient2(low = "blue", high = "red", mid = "white", midpoint = 0,
    space = "Lab") + ggtitle("Covariance matrices")
  print(gCov)

  ### Proportions
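  ## for each observation: posterior probability of its assigned cluster (X1)
  ## and the cluster index (X2); these probabilities are then boxplotted per cluster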
  gam2 <- matrix(NA, ncol = K, nrow = n)
  for (i in 1:n)
  {
    gam2[i, ] <- c(model$proba[i, model$affec[i]], model$affec[i])
  }

  bp <- ggplot(data.frame(gam2), aes(x = X2, y = X1, color = X2, group = X2)) +
    geom_boxplot() + theme(legend.position = "none") + background_grid(major = "xy",
    minor = "none")
  print(bp)

  ### Mean in each cluster
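  ## columnwise mean of cbind(X, Y) within each cluster (a single row is kept as is),
  ## plotted as one curve per cluster over the p + m coordinates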
  XY <- cbind(X, Y)
  XY_class <- list()
  meanPerClass <- matrix(0, ncol = K, nrow = dim(XY)[2])
  for (r in 1:K)
  {
    XY_class[[r]] <- XY[model$affec == r, ]
    if (sum(model$affec == r) == 1)
    {
      meanPerClass[, r] <- XY_class[[r]]
    } else
    {
      meanPerClass[, r] <- apply(XY_class[[r]], 2, mean)
    }
  }
  data <- data.frame(mean = as.vector(meanPerClass), cluster = as.character(rep(1:K,
    each = dim(XY)[2])), time = rep(1:dim(XY)[2], K))
  g <- ggplot(data, aes(x = time, y = mean, group = cluster, color = cluster))
  print(g + geom_line(aes(linetype = cluster, color = cluster)) + geom_point(aes(color = cluster)) +
    ggtitle("Mean per cluster"))

}