From: Benjamin Auder
Date: Thu, 9 Mar 2017 16:22:09 +0000 (+0100)
Subject: draft final form of current package
X-Git-Url: https://git.auder.net/?a=commitdiff_plain;h=a174b8ea1f322992068ab42810df017a2b9620ee;p=epclust.git

draft final form of current package
---

diff --git a/TODO b/TODO
index 52b139d..53b4c97 100644
--- a/TODO
+++ b/TODO
@@ -65,3 +65,5 @@ reduce size: 17519 too long?
 synchrone: sum
 cwt: trim R part
 //: clever by rows; keep cwt...
+
+Matrix storage: always column-wise?
diff --git a/epclust/R/clustering.R b/epclust/R/clustering.R
index 4519f44..a4c273a 100644
--- a/epclust/R/clustering.R
+++ b/epclust/R/clustering.R
@@ -67,8 +67,8 @@ clusteringTask1 = function(
 
 #' @rdname clustering
 #' @export
-clusteringTask2 = function(medoids, K2,
-	getRefSeries, nb_ref_curves, nb_series_per_chunk, ncores_clust=1,verbose=FALSE,parll=TRUE)
+clusteringTask2 = function(medoids, K2, getRefSeries, nb_ref_curves,
+	nb_series_per_chunk, nbytes,endian,ncores_clust=1,verbose=FALSE,parll=TRUE)
 {
 	if (verbose)
 		cat(paste("*** Clustering task 2 on ",nrow(medoids)," lines\n", sep=""))
@@ -77,7 +77,7 @@ clusteringTask2 = function(medoids, K2,
 		return (medoids)
 	synchrones = computeSynchrones(medoids, getRefSeries,
 		nb_ref_curves, nb_series_per_chunk, ncores_clust, verbose, parll)
-	distances = computeWerDists(synchrones, ncores_clust, verbose, parll)
+	distances = computeWerDists(synchrones, nbytes, endian, ncores_clust, verbose, parll)
 	medoids[ computeClusters2(distances,K2,verbose), ]
 }
 
@@ -203,7 +203,7 @@ computeSynchrones = function(medoids, getRefSeries,
 #' @return A matrix of size K1 x K1
 #'
 #' @export
-computeWerDists = function(synchrones, ncores_clust=1,verbose=FALSE,parll=TRUE)
+computeWerDists = function(synchrones, nbytes,endian,ncores_clust=1,verbose=FALSE,parll=TRUE)
 {
 	if (verbose)
 		cat(paste("--- Compute WER dists\n", sep=""))
@@ -218,8 +218,8 @@ computeWerDists = function(synchrones, ncores_clust=1,verbose=FALSE,parll=TRUE)
 	#NOTE: default scalevector == 2^(0:(noctave * nvoice) / nvoice) * s0 (?)
 	scalevector <- 2^(4:(noctave * nvoice) / nvoice + 1)
 	#condition: ( log2(s0*w0/(2*pi)) - 1 ) * nvoice + 1.5 >= 1
-	s0=2
-	w0=2*pi
+	s0 = 2
+	w0 = 2*pi
 	scaled=FALSE
 	s0log = as.integer( (log2( s0*w0/(2*pi) ) - 1) * nvoice + 1.5 )
 	totnoct = noctave + as.integer(s0log/nvoice) + 1
@@ -237,7 +237,7 @@ computeWerDists = function(synchrones, ncores_clust=1,verbose=FALSE,parll=TRUE)
 		V = V[-1]
 		pairs = c(pairs, lapply(V, function(v) c(i,v)))
 	}
-	
+
 	computeSaveCWT = function(index)
 	{
 		ts <- scale(ts(synchrones[index,]), center=TRUE, scale=scaled)
@@ -249,7 +249,7 @@ computeWerDists = function(synchrones, ncores_clust=1,verbose=FALSE,parll=TRUE)
 		res <- sqres / max(Mod(sqres))
 		#TODO: serialize the CWTs, retrieve them via getDataInFile;
 		#--> OK, just need to store them as plain series of size delta*ncol (53*17519)
-		binarize(res, cwt_file, 100, ",", nbytes, endian)
+		binarize(c(as.double(Re(res)),as.double(Im(res))), cwt_file, ncol(res), ",", nbytes, endian)
 	}
 
 	if (parll)
@@ -271,6 +271,8 @@ computeWerDists = function(synchrones, ncores_clust=1,verbose=FALSE,parll=TRUE)
 	getCWT = function(index)
 	{
 		#from cwt_file ...
+		res <- getDataInFile(c(2*index-1,2*index), cwt_file, nbytes, endian)
+		###############TODO:
 	}
 
 	# Distance between rows i and j
diff --git a/epclust/R/main.R b/epclust/R/main.R
index 9ba23ae..28217c3 100644
--- a/epclust/R/main.R
+++ b/epclust/R/main.R
@@ -172,8 +172,8 @@ claws = function(getSeries, K1, K2,
 	{
 		require("bigmemory", quietly=TRUE)
 		medoids1 = bigmemory::as.big.matrix( getSeries(indices_medoids) )
-		medoids2 = clusteringTask2(medoids1,
-			K2, getSeries, nb_curves, nb_series_per_chunk, ncores_clust, verbose, parll)
+		medoids2 = clusteringTask2(medoids1, K2, getSeries, nb_curves, nb_series_per_chunk,
+			nbytes, endian, ncores_clust, verbose, parll)
 		binarize(medoids2, synchrones_file, nb_series_per_chunk, sep, nbytes, endian)
 		return (vector("integer",0))
 	}
@@ -235,8 +235,8 @@ claws = function(getSeries, K1, K2,
 	indices_medoids = clusteringTask1(
 		indices, getContribs, K1, nb_series_per_chunk, ncores_tasks*ncores_clust, verbose, parll)
 	medoids1 = bigmemory::as.big.matrix( getSeries(indices_medoids) )
-	medoids2 = clusteringTask2(medoids1, K2,
-		getRefSeries, nb_curves, nb_series_per_chunk, ncores_tasks*ncores_clust, verbose, parll)
+	medoids2 = clusteringTask2(medoids1, K2, getRefSeries, nb_curves, nb_series_per_chunk,
+		nbytes, endian, ncores_tasks*ncores_clust, verbose, parll)
 
 	# Cleanup
 	unlink(bin_dir, recursive=TRUE)
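
A note on the s0/w0 hunk in computeWerDists: with s0 = 2 and w0 = 2*pi, log2(s0*w0/(2*pi)) equals 1, so the #condition line reduces to 1.5 >= 1 and s0log comes out as 1 whatever nvoice is; totnoct is then noctave + 1 for any nvoice > 1. Quick check in R (the nvoice and noctave values below are assumptions for illustration only; both are set elsewhere in the function):

# Assumed values, only to make the check runnable
nvoice <- 4
noctave <- 13
s0 <- 2
w0 <- 2*pi
log2(s0*w0/(2*pi))                                              # 1, so the condition is 1.5 >= 1
s0log <- as.integer( (log2(s0*w0/(2*pi)) - 1) * nvoice + 1.5 )  # 1
totnoct <- noctave + as.integer(s0log/nvoice) + 1               # noctave + 1 = 14 here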
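
The new binarize call in computeSaveCWT flattens each complex CWT matrix into its real part followed by its imaginary part before writing, and the getCWT stub fetches the two stored halves back (series 2*index-1 and 2*index); recombining them into a complex matrix is the part still marked TODO. A minimal, self-contained sketch of that round trip, using base R writeBin/readBin as stand-ins for the package's binarize/getDataInFile and a made-up toy matrix:

# Stand-in round trip for the Re/Im storage scheme (not the epclust implementation)
nbytes <- 8
endian <- "little"
cwt_file <- tempfile("cwt_")

# Toy complex block playing the role of res (delta x ncol in the real code)
res <- matrix(complex(real = rnorm(6), imaginary = rnorm(6)), nrow = 2)

# Write: flatten to plain doubles, real part first, then imaginary part
con <- file(cwt_file, "wb")
writeBin(c(as.double(Re(res)), as.double(Im(res))), con, size = nbytes, endian = endian)
close(con)

# Read both halves back and rebuild the complex matrix
con <- file(cwt_file, "rb")
vals <- readBin(con, "double", n = 2*length(res), size = nbytes, endian = endian)
close(con)
half <- length(vals) / 2
rebuilt <- matrix(complex(real = vals[1:half], imaginary = vals[(half+1):(2*half)]),
	nrow = nrow(res))
stopifnot(isTRUE(all.equal(rebuilt, res)))

Storing the CWTs this way keeps every record a plain double series, so the generic binarize/getDataInFile machinery needs no complex-number handling; the only extra bookkeeping is that synchrone index maps to the pair of series 2*index-1 and 2*index.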