context("clustering") test_that("computeSynchrones behave as expected", { # Generate 300 sinusoïdal series of 3 kinds: all series of indices == 0 mod 3 are the same # (plus noise), all series of indices == 1 mod 3 are the same (plus noise) ... n = 300 x = seq(0,9.5,0.1) L = length(x) #96 1/4h K = 3 s1 = cos(x) s2 = sin(x) s3 = c( s1[1:(L%/%2)] , s2[(L%/%2+1):L] ) #sum((s1-s2)^2) == 96 #sum((s1-s3)^2) == 58 #sum((s2-s3)^2) == 38 s = list(s1, s2, s3) series = matrix(nrow=L, ncol=n) for (i in seq_len(n)) series[,i] = s[[I(i,K)]] + rnorm(L,sd=0.01) getRefSeries = function(indices) { indices = indices[indices <= n] if (length(indices)>0) as.matrix(series[,indices]) else NULL } synchrones = computeSynchrones(bigmemory::as.big.matrix(cbind(s1,s2,s3)), getRefSeries, n, 100, verbose=TRUE, parll=FALSE) expect_equal(dim(synchrones), c(L,K)) for (i in 1:K) { # Synchrones are (for each medoid) sums of closest curves. # Here, we expect exactly 100 curves of each kind to be assigned respectively to # synchrone 1, 2 and 3 => division by 100 should be very close to the ref curve expect_equal(synchrones[,i]/100, s[[i]], tolerance=0.01) } }) test_that("Helper function to spread indices work properly", { indices <- 1:400 # bigger nb_per_set than length(indices) expect_equal(epclust:::.spreadIndices(indices,500), list(indices)) # nb_per_set == length(indices) expect_equal(epclust:::.spreadIndices(indices,400), list(indices)) # length(indices) %% nb_per_set == 0 expect_equal(epclust:::.spreadIndices(indices,200), c( list(indices[1:200]), list(indices[201:400]) )) expect_equal(epclust:::.spreadIndices(indices,100), c( list(indices[1:100]), list(indices[101:200]), list(indices[201:300]), list(indices[301:400]) )) # length(indices) / nb_per_set == 1, length(indices) %% nb_per_set == 100 expect_equal(epclust:::.spreadIndices(indices,300), list(indices)) # length(indices) / nb_per_set == 2, length(indices) %% nb_per_set == 42 repartition <- epclust:::.spreadIndices(indices,179) expect_equal(length(repartition), 2) expect_equal(length(repartition[[1]]), 179 + 21) expect_equal(length(repartition[[1]]), 179 + 21) }) test_that("clusteringTask1 behave as expected", { # Generate 60 reference sinusoïdal series (medoids to be found), # and sample 900 series around them (add a small noise) n = 900 x = seq(0,9.5,0.1) L = length(x) #96 1/4h K1 = 60 s = lapply( seq_len(K1), function(i) x^(1+i/30)*cos(x+i) ) series = matrix(nrow=L, ncol=n) for (i in seq_len(n)) series[,i] = s[[I(i,K1)]] + rnorm(L,sd=0.01) getSeries = function(indices) { indices = indices[indices <= n] if (length(indices)>0) as.matrix(series[,indices]) else NULL } wf = "haar" ctype = "absolute" getContribs = function(indices) curvesToContribs(series[,indices],wf,ctype) require("cluster", quietly=TRUE) algoClust1 = function(contribs,K) cluster::pam(t(contribs),K,diss=FALSE)$id.med indices1 = clusteringTask1(1:n, getContribs, K1, algoClust1, 75, verbose=TRUE, parll=FALSE) medoids_K1 = getSeries(indices1) expect_equal(dim(medoids_K1), c(L,K1)) # Not easy to evaluate result: at least we expect it to be better than random selection of # medoids within initial series distor_good = computeDistortion(series, medoids_K1) for (i in 1:3) expect_lte( distor_good, computeDistortion(series,series[,sample(1:n, K1)]) ) }) test_that("clusteringTask2 behave as expected", { skip("Unexplained failure") # Same 60 reference sinusoïdal series than in clusteringTask1 test, # but this time we consider them as medoids - skipping stage 1 # Here also we sample 900 series around the 60 
"medoids" n = 900 x = seq(0,9.5,0.1) L = length(x) #96 1/4h K1 = 60 K2 = 3 #for (i in 1:60) {plot(x^(1+i/30)*cos(x+i),type="l",col=i,ylim=c(-50,50)); par(new=TRUE)} s = lapply( seq_len(K1), function(i) x^(1+i/30)*cos(x+i) ) series = matrix(nrow=L, ncol=n) for (i in seq_len(n)) series[,i] = s[[I(i,K1)]] + rnorm(L,sd=0.01) getRefSeries = function(indices) { indices = indices[indices <= n] if (length(indices)>0) as.matrix(series[,indices]) else NULL } # Perfect situation: all medoids "after stage 1" are good. medoids_K1 = bigmemory::as.big.matrix( sapply( 1:K1, function(i) s[[I(i,K1)]] ) ) algoClust2 = function(dists,K) cluster::pam(dists,K,diss=TRUE)$id.med medoids_K2 = clusteringTask2(medoids_K1, K2, algoClust2, getRefSeries, n, 75, 4, 8, "little", verbose=TRUE, parll=FALSE) expect_equal(dim(medoids_K2), c(L,K2)) # Not easy to evaluate result: at least we expect it to be better than random selection of # synchrones within 1...K1 (from where distances computations + clustering was run) synchrones = computeSynchrones(medoids_K1,getRefSeries,n,75,verbose=FALSE,parll=FALSE) distor_good = computeDistortion(synchrones, medoids_K2) for (i in 1:3) expect_lte( distor_good, computeDistortion(synchrones, synchrones[,sample(1:K1,3)]) ) })