context("clustering")

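# NOTE: these tests rely on an index-mapping helper I(i, base), assumed to be
# defined in a testthat helper file of the package (not shown here). As an
# illustration only, a minimal sketch of the cyclic mapping the tests seem to
# expect; the name and exact definition below are assumptions:
Isketch <- function(i, base)
  (i - 1) %% base + 1
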
test_that("computeSynchrones behaves as expected",
{
  n = 300
  x = seq(0,9.5,0.1)
  L = length(x) # L == 96: one day of quarter-hour time steps
  K = 3
  s1 = cos(x)
  s2 = sin(x)
  s3 = c( s1[1:(L%/%2)] , s2[(L%/%2+1):L] )
  #sum((s1-s2)^2) == 96
  #sum((s1-s3)^2) == 58
  #sum((s2-s3)^2) == 38
  s = list(s1, s2, s3)
  series = matrix(nrow=L, ncol=n)
  for (i in seq_len(n))
    series[,i] = s[[I(i,K)]] + rnorm(L,sd=0.01)
  getRefSeries = function(indices) {
    indices = indices[indices <= n]
    if (length(indices)>0) series[,indices] else NULL
  }
  synchrones = computeSynchrones(bigmemory::as.big.matrix(cbind(s1,s2,s3)), getRefSeries,
    n, 100, sync_mean=TRUE, verbose=TRUE, parll=FALSE)

  expect_equal(dim(synchrones), c(L,K))
  for (i in 1:K)
    expect_equal(synchrones[,i], s[[i]], tolerance=0.01)
})

# Helper function to divide indices into balanced sets
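# A minimal sketch of the splitting behaviour that the expectations below
# encode (illustration only, not the actual epclust:::.spreadIndices
# implementation; the name spreadIndicesSketch is an assumption): cut the
# indices into floor(length/nb_per_set) sets of nb_per_set elements, then
# spread any remainder evenly over those sets.
spreadIndicesSketch <- function(indices, nb_per_set)
{
  nb_sets <- length(indices) %/% nb_per_set
  if (nb_sets <= 1)
    return(list(indices))
  sets <- lapply(seq_len(nb_sets), function(i)
    indices[((i-1)*nb_per_set+1):(i*nb_per_set)])
  # Leftover indices (if any) are appended one by one, cycling over the sets
  extra <- indices[-seq_len(nb_sets*nb_per_set)]
  for (j in seq_along(extra))
  {
    k <- (j-1) %% nb_sets + 1
    sets[[k]] <- c(sets[[k]], extra[j])
  }
  sets
}
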
test_that("Helper function to spread indices works properly",
{
  indices <- 1:400

  # bigger nb_per_set than length(indices)
  expect_equal(epclust:::.spreadIndices(indices,500), list(indices))

  # nb_per_set == length(indices)
  expect_equal(epclust:::.spreadIndices(indices,400), list(indices))

  # length(indices) %% nb_per_set == 0
  expect_equal(epclust:::.spreadIndices(indices,200),
    c( list(indices[1:200]), list(indices[201:400]) ))
  expect_equal(epclust:::.spreadIndices(indices,100),
    c( list(indices[1:100]), list(indices[101:200]),
      list(indices[201:300]), list(indices[301:400]) ))

  # length(indices) %/% nb_per_set == 1, length(indices) %% nb_per_set == 100
  expect_equal(epclust:::.spreadIndices(indices,300), list(indices))
  # length(indices) %/% nb_per_set == 2, length(indices) %% nb_per_set == 42
  repartition <- epclust:::.spreadIndices(indices,179)
  expect_equal(length(repartition), 2)
  expect_equal(length(repartition[[1]]), 179 + 21)
  expect_equal(length(repartition[[2]]), 179 + 21)
})

test_that("clusteringTask1 behaves as expected",
{
  n = 900
  x = seq(0,9.5,0.1)
  L = length(x) # L == 96: one day of quarter-hour time steps
  K1 = 60
  s = lapply( seq_len(K1), function(i) x^(1+i/30)*cos(x+i) )
  series = matrix(nrow=L, ncol=n)
  for (i in seq_len(n))
    series[,i] = s[[I(i,K1)]] + rnorm(L,sd=0.01)
  getSeries = function(indices) {
    indices = indices[indices <= n]
    if (length(indices)>0) series[,indices] else NULL
  }
  wf = "haar"
  ctype = "absolute"
  getContribs = function(indices) curvesToContribs(series[,indices],wf,ctype)
  require("cluster", quietly=TRUE)
  algoClust1 = function(contribs,K) cluster::pam(contribs,K,diss=FALSE)$id.med
  indices1 = clusteringTask1(1:n, getContribs, K1, algoClust1, 75, verbose=TRUE, parll=FALSE)
  medoids_K1 = getSeries(indices1)

  expect_equal(dim(medoids_K1), c(L,K1))
  # The result is not easy to evaluate; at the very least we expect it to beat a random
  # selection of K1 medoids among the initial series (see the computeDistortion sketch
  # after this test)
  distorGood = computeDistortion(series, medoids_K1)
  for (i in 1:3)
    expect_lte( distorGood, computeDistortion(series,series[,sample(1:n, K1)]) )
})
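
# computeDistortion(series, medoids) is also assumed to come from a testthat
# helper file and is not shown here. As an illustration only, a minimal sketch
# of the kind of quantity the comparisons above and below rely on: the average,
# over all series, of the squared distance to the nearest medoid (lower is
# better). The name computeDistortionSketch is an assumption.
computeDistortionSketch <- function(series, medoids)
{
  # Accept either a plain matrix or a bigmemory big.matrix of medoids
  if (bigmemory::is.big.matrix(medoids))
    medoids <- medoids[,]
  mean( sapply( seq_len(ncol(series)), function(i)
    min( colSums( (medoids - series[,i])^2 ) ) ) )
}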

test_that("clusteringTask2 behaves as expected",
{
  n = 900
  x = seq(0,9.5,0.1)
  L = length(x) # L == 96: one day of quarter-hour time steps
  K1 = 60
  K2 = 3
  #for (i in 1:60) {plot(x^(1+i/30)*cos(x+i),type="l",col=i,ylim=c(-50,50)); par(new=TRUE)}
  s = lapply( seq_len(K1), function(i) x^(1+i/30)*cos(x+i) )
  series = matrix(nrow=L, ncol=n)
  for (i in seq_len(n))
    series[,i] = s[[I(i,K1)]] + rnorm(L,sd=0.01)
  getRefSeries = function(indices) {
    indices = indices[indices <= n]
    if (length(indices)>0) series[,indices] else NULL
  }
  # Artificially simulate 60 medoids - perfect situation, all equal to one of the refs
  medoids_K1 = bigmemory::as.big.matrix( sapply( 1:K1, function(i) s[[I(i,K1)]] ) )
  medoids_K2 = clusteringTask2(medoids_K1, K2, getRefSeries, n, 75, verbose=TRUE, parll=FALSE)

  expect_equal(dim(medoids_K2), c(L,K2))
  # The result is not easy to evaluate; at the very least we expect it to beat a random
  # selection of K2 medoids among the K1 references
  distorGood = computeDistortion(series, medoids_K2)
  for (i in 1:3)
    expect_lte( distorGood, computeDistortion(series,medoids_K1[,sample(1:K1, K2)]) )
})