With sync_mean to average synchrones: bad idea, will be removed
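Note on the flag itself: sync_mean is the only new behavioural knob these tests exercise; when TRUE, computeSynchrones averages the series assigned to each medoid instead of summing them. A minimal standalone sketch of that distinction, for reference only (buildSynchronesSketch and its arguments are invented for this note, not the package code), using the column-wise L x n storage the tests switch to:

# Illustration only (not the epclust implementation): aggregate the series
# assigned to each medoid, either by summing them or, when sync_mean=TRUE,
# by averaging them. Series are stored column-wise (L x n), as in the tests.
buildSynchronesSketch <- function(series, assignment, K, sync_mean=FALSE)
{
	L <- nrow(series)
	synchrones <- matrix(0., nrow=L, ncol=K)
	counts <- integer(K)
	for (i in seq_len(ncol(series)))
	{
		k <- assignment[i]
		synchrones[,k] <- synchrones[,k] + series[,i]
		counts[k] <- counts[k] + 1
	}
	if (sync_mean)
		synchrones <- sweep(synchrones, 2, pmax(counts,1), '/')  # divide each column by its cluster size
	synchrones
}

With the toy data of the computeSynchrones test (3 references, ~100 noisy copies each), averaging is presumably what keeps each synchrone on the scale of a single reference curve, so that expect_equal(synchrones[,i], s[[i]], tolerance=0.01) can hold.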
diff --git a/epclust/tests/testthat/test.clustering.R b/epclust/tests/testthat/test.clustering.R
index b6231e2..e22835a 100644
--- a/epclust/tests/testthat/test.clustering.R
+++ b/epclust/tests/testthat/test.clustering.R
@@ -1,50 +1,5 @@
 context("clustering")
 
-#shorthand: map 1->1, 2->2, 3->3, 4->1, ..., 149->2, 150->3, ... (if base==3)
-I = function(i, base)
-       (i-1) %% base + 1
-
-test_that("computeClusters1 behave as expected",
-{
-       require("MASS", quietly=TRUE)
-       library("clue", quietly=TRUE)
-
-       # 3 gaussian clusters, 300 items; and then 7 gaussian clusters, 490 items
-       n = 300
-       d = 5
-       K = 3
-       for (ndK in list( c(300,5,3), c(490,10,7) ))
-       {
-               n = ndK[1] ; d = ndK[2] ; K = ndK[3]
-               cs = n/K #cluster size
-               Id = diag(d)
-               coefs = do.call(rbind,
-                       lapply(1:K, function(i) MASS::mvrnorm(cs, c(rep(0,(i-1)),5,rep(0,d-i)), Id)))
-               indices_medoids = computeClusters1(coefs, K)
-               # Get coefs assignments (to medoids)
-               assignment = sapply(seq_len(n), function(i)
-                       which.min( rowSums( sweep(coefs[indices_medoids,],2,coefs[i,],'-')^2 ) ) )
-               for (i in 1:K)
-                       expect_equal(sum(assignment==i), cs, tolerance=5)
-
-               costs_matrix = matrix(nrow=K,ncol=K)
-               for (i in 1:K)
-               {
-                       for (j in 1:K)
-                       {
-                               # assign i (in result) to j (order 1,2,3)
-                               costs_matrix[i,j] = abs( mean(assignment[((i-1)*cs+1):(i*cs)]) - j )
-                       }
-               }
-               permutation = as.integer( clue::solve_LSAP(costs_matrix) )
-               for (i in 1:K)
-               {
-                       expect_equal(
-                               mean(assignment[((i-1)*cs+1):(i*cs)]), permutation[i], tolerance=0.05)
-               }
-       }
-})
-
 test_that("computeSynchrones behave as expected",
 {
        n = 300
@@ -58,83 +13,104 @@ test_that("computeSynchrones behave as expected",
        #sum((s1-s3)^2) == 58
        #sum((s2-s3)^2) == 38
        s = list(s1, s2, s3)
-       series = matrix(nrow=n, ncol=L)
+       series = matrix(nrow=L, ncol=n)
        for (i in seq_len(n))
-               series[i,] = s[[I(i,K)]] + rnorm(L,sd=0.01)
+               series[,i] = s[[I(i,K)]] + rnorm(L,sd=0.01)
        getRefSeries = function(indices) {
-               indices = indices[indices < n]
-               if (length(indices)>0) series[indices,] else NULL
+               indices = indices[indices <= n]
+               if (length(indices)>0) series[,indices] else NULL
        }
-       synchrones = computeSynchrones(rbind(s1,s2,s3), getRefSeries, 100)
+       synchrones = computeSynchrones(bigmemory::as.big.matrix(cbind(s1,s2,s3)), getRefSeries,
+               n, 100, sync_mean=TRUE, verbose=TRUE, parll=FALSE)
 
-       expect_equal(dim(synchrones), c(K,L))
+       expect_equal(dim(synchrones), c(L,K))
        for (i in 1:K)
-               expect_equal(synchrones[i,], s[[i]], tolerance=0.01)
+               expect_equal(synchrones[,i], s[[i]], tolerance=0.01)
 })
 
-computeDistortion = function(series, medoids)
+# Helper function to divide indices into balanced sets
+test_that("Helper function to spread indices works properly",
 {
-       n = nrow(series) ; L = ncol(series)
-       distortion = 0.
-       for (i in seq_len(n))
-               distortion = distortion + min( rowSums( sweep(medoids,2,series[i,],'-')^2 ) / L )
-       distortion / n
-}
+       indices <- 1:400
+
+       # bigger nb_per_set than length(indices)
+       expect_equal(epclust:::.spreadIndices(indices,500), list(indices))
+
+       # nb_per_set == length(indices)
+       expect_equal(epclust:::.spreadIndices(indices,400), list(indices))
+
+       # length(indices) %% nb_per_set == 0
+       expect_equal(epclust:::.spreadIndices(indices,200),
+               c( list(indices[1:200]), list(indices[201:400]) ))
+       expect_equal(epclust:::.spreadIndices(indices,100),
+               c( list(indices[1:100]), list(indices[101:200]),
+                       list(indices[201:300]), list(indices[301:400]) ))
+
+       # length(indices) / nb_per_set == 1, length(indices) %% nb_per_set == 100
+       expect_equal(epclust:::.spreadIndices(indices,300), list(indices))
+       # length(indices) / nb_per_set == 2, length(indices) %% nb_per_set == 42
+       repartition <- epclust:::.spreadIndices(indices,179)
+       expect_equal(length(repartition), 2)
+       expect_equal(length(repartition[[1]]), 179 + 21)
+       expect_equal(length(repartition[[2]]), 179 + 21)
+})
 
-test_that("computeClusters2 behave as expected",
+test_that("clusteringTask1 behave as expected",
 {
        n = 900
        x = seq(0,9.5,0.1)
        L = length(x) #96 1/4h
        K1 = 60
-       K2 = 3
-       #for (i in 1:60) {plot(x^(1+i/30)*cos(x+i),type="l",col=i,ylim=c(-50,50)); par(new=TRUE)}
        s = lapply( seq_len(K1), function(i) x^(1+i/30)*cos(x+i) )
-       series = matrix(nrow=n, ncol=L)
+       series = matrix(nrow=L, ncol=n)
        for (i in seq_len(n))
-               series[i,] = s[[I(i,K1)]] + rnorm(L,sd=0.01)
-       getRefSeries = function(indices) {
-               indices = indices[indices < n]
-               if (length(indices)>0) series[indices,] else NULL
+               series[,i] = s[[I(i,K1)]] + rnorm(L,sd=0.01)
+       getSeries = function(indices) {
+               indices = indices[indices <= n]
+               if (length(indices)>0) series[,indices] else NULL
        }
-       # Artificially simulate 60 medoids - perfect situation, all equal to one of the refs
-       medoids_K1 = do.call(rbind, lapply( 1:K1, function(i) s[[I(i,K1)]] ) )
-       medoids_K2 = computeClusters2(medoids_K1, K2, getRefSeries, 75)
+       wf = "haar"
+       ctype = "absolute"
+       getContribs = function(indices) curvesToContribs(series[,indices],wf,ctype)
+       require("cluster", quietly=TRUE)
+       algoClust1 = function(contribs,K) cluster::pam(t(contribs),K,diss=FALSE)$id.med
+       indices1 = clusteringTask1(1:n, getContribs, K1, algoClust1, 75, verbose=TRUE, parll=FALSE)
+       medoids_K1 = getSeries(indices1)
 
-       expect_equal(dim(medoids_K2), c(K2,L))
+       expect_equal(dim(medoids_K1), c(L,K1))
        # Not easy to evaluate result: at least we expect it to be better than random selection of
-       # medoids within 1...K1 (among references)
-       
-       distorGood = computeDistortion(series, medoids_K2)
+       # medoids within initial series
+       distorGood = computeDistortion(series, medoids_K1)
        for (i in 1:3)
-               expect_lte( distorGood, computeDistortion(series,medoids_K1[sample(1:K1, K2),]) )
+               expect_lte( distorGood, computeDistortion(series,series[,sample(1:n, K1)]) )
 })
 
-test_that("clusteringTask + computeClusters2 behave as expected",
+test_that("clusteringTask2 behave as expected",
 {
        n = 900
        x = seq(0,9.5,0.1)
        L = length(x) #96 1/4h
        K1 = 60
        K2 = 3
+       #for (i in 1:60) {plot(x^(1+i/30)*cos(x+i),type="l",col=i,ylim=c(-50,50)); par(new=TRUE)}
        s = lapply( seq_len(K1), function(i) x^(1+i/30)*cos(x+i) )
-       series = matrix(nrow=n, ncol=L)
+       series = matrix(nrow=L, ncol=n)
        for (i in seq_len(n))
-               series[i,] = s[[I(i,K1)]] + rnorm(L,sd=0.01)
-       getSeries = function(indices) {
+               series[,i] = s[[I(i,K1)]] + rnorm(L,sd=0.01)
+       getRefSeries = function(indices) {
                indices = indices[indices <= n]
-               if (length(indices)>0) series[indices,] else NULL
+               if (length(indices)>0) series[,indices] else NULL
        }
-       wf = "haar"
-       getCoefs = function(indices) curvesToCoefs(series[indices,],wf)
-       medoids_K1 = getSeries( clusteringTask(1:n, getCoefs, K1, 75, 4) )
-       medoids_K2 = computeClusters2(medoids_K1, K2, getSeries, 120)
+       # Artificially simulate 60 medoids - perfect situation, all equal to one of the refs
+       medoids_K1 = bigmemory::as.big.matrix( sapply( 1:K1, function(i) s[[I(i,K1)]] ) )
+       algoClust2 = function(dists,K) cluster::pam(dists,K,diss=TRUE)$id.med
+       medoids_K2 = clusteringTask2(medoids_K1, K2, algoClust2, getRefSeries,
+               n, 75, sync_mean=TRUE, verbose=TRUE, parll=FALSE)
 
-       expect_equal(dim(medoids_K1), c(K1,L))
-       expect_equal(dim(medoids_K2), c(K2,L))
+       expect_equal(dim(medoids_K2), c(L,K2))
        # Not easy to evaluate result: at least we expect it to be better than random selection of
        # medoids within 1...K1 (among references)
        distorGood = computeDistortion(series, medoids_K2)
        for (i in 1:3)
-               expect_lte( distorGood, computeDistortion(series,medoids_K1[sample(1:K1, K2),]) )
+               expect_lte( distorGood, computeDistortion(series,medoids_K1[,sample(1:K1, K2)]) )
 })
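
Side note: the local computeDistortion helper is deleted from this file by this commit, yet the rewritten tests above still call it, so it presumably lives with the other test helpers now. Below is a column-wise sketch mirroring the removed row-wise definition under the new L x n layout; the big.matrix branch is an assumption, added only so calls such as computeDistortion(series, medoids_K2) would also accept bigmemory objects.

# Sketch of a column-wise distortion helper, mirroring the removed row-wise
# version: mean over all series of the (length-normalized) squared distance
# to the closest medoid. Assumes series is L x n and medoids is L x K.
computeDistortion <- function(series, medoids)
{
	if (bigmemory::is.big.matrix(medoids))
		medoids <- medoids[,]  # materialize as a regular matrix
	n <- ncol(series) ; L <- nrow(series)
	distortion <- 0.
	for (i in seq_len(n))
		distortion <- distortion + min( colSums( sweep(medoids,1,series[,i],'-')^2 ) / L )
	distortion / n
}

Under that reading, computeDistortion(series, series[,sample(1:n, K1)]) gives the random-selection baseline used in the expect_lte checks above.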