#' @rdname clustering
#' @export
clusteringTask2 = function(medoids, K2, getRefSeries, nb_ref_curves,
	nb_series_per_chunk, nbytes, endian, ncores_clust=1, verbose=FALSE, parll=TRUE)
{
	if (verbose)
		cat(paste("*** Clustering task 2 on ",nrow(medoids)," lines\n", sep=""))
	# Fewer (or exactly) K2 input medoids: nothing to cluster, return them as-is.
	# NOTE(review): the previous unconditional `return (medoids)` was a debug
	# leftover that made everything below unreachable.
	if (nrow(medoids) <= K2)
		return (medoids)
	# Build one "synchrone" (sum of the series assigned to it) per input medoid
	synchrones = computeSynchrones(medoids,
		getRefSeries, nb_ref_curves, nb_series_per_chunk, ncores_clust, verbose, parll)
	# Pairwise WER distance matrix between synchrones (nbytes/endian describe
	# the binary format used to (de)serialize intermediate CWT data)
	distances = computeWerDists(synchrones, nbytes, endian, ncores_clust, verbose, parll)
	# Keep the K2 rows of `medoids` selected by clustering on the WER distances
	medoids[ computeClusters2(distances,K2,verbose), ]
}
#' @return A matrix of size K1 x K1
#'
#' @export
-computeWerDists = function(synchrones, ncores_clust=1,verbose=FALSE,parll=TRUE)
+computeWerDists = function(synchrones, nbytes,endian,ncores_clust=1,verbose=FALSE,parll=TRUE)
{
if (verbose)
cat(paste("--- Compute WER dists\n", sep=""))
#NOTE: default scalevector == 2^(0:(noctave * nvoice) / nvoice) * s0 (?)
scalevector <- 2^(4:(noctave * nvoice) / nvoice + 1)
#condition: ( log2(s0*w0/(2*pi)) - 1 ) * nvoice + 1.5 >= 1
- s0=2
- w0=2*pi
+ s0 = 2
+ w0 = 2*pi
scaled=FALSE
s0log = as.integer( (log2( s0*w0/(2*pi) ) - 1) * nvoice + 1.5 )
totnoct = noctave + as.integer(s0log/nvoice) + 1
V = V[-1]
pairs = c(pairs, lapply(V, function(v) c(i,v)))
}
-
+
computeSaveCWT = function(index)
{
ts <- scale(ts(synchrones[index,]), center=TRUE, scale=scaled)
res <- sqres / max(Mod(sqres))
#TODO: serializer les CWT, les récupérer via getDataInFile ;
#--> OK, faut juste stocker comme séries simples de taille delta*ncol (53*17519)
- binarize(res, cwt_file, 100, ",", nbytes, endian)
+ binarize(c(as.double(Re(res)),as.double(Im(res))), cwt_file, ncol(res), ",", nbytes, endian)
}
if (parll)
getCWT = function(index)
{
#from cwt_file ...
+ res <- getDataInFile(c(2*index-1,2*index), cwt_file, nbytes, endian)
+ ###############TODO:
}
# Distance between rows i and j
{
require("bigmemory", quietly=TRUE)
medoids1 = bigmemory::as.big.matrix( getSeries(indices_medoids) )
- medoids2 = clusteringTask2(medoids1,
- K2, getSeries, nb_curves, nb_series_per_chunk, ncores_clust, verbose, parll)
+ medoids2 = clusteringTask2(medoids1, K2, getSeries, nb_curves, nb_series_per_chunk,
+ nbytes, endian, ncores_clust, verbose, parll)
binarize(medoids2, synchrones_file, nb_series_per_chunk, sep, nbytes, endian)
return (vector("integer",0))
}
indices_medoids = clusteringTask1(
indices, getContribs, K1, nb_series_per_chunk, ncores_tasks*ncores_clust, verbose, parll)
medoids1 = bigmemory::as.big.matrix( getSeries(indices_medoids) )
- medoids2 = clusteringTask2(medoids1, K2,
- getRefSeries, nb_curves, nb_series_per_chunk, ncores_tasks*ncores_clust, verbose, parll)
+ medoids2 = clusteringTask2(medoids1, K2, getRefSeries, nb_curves, nb_series_per_chunk,
+ nbytes, endian, ncores_tasks*ncores_clust, verbose, parll)
# Cleanup
unlink(bin_dir, recursive=TRUE)