+ indices_all[((i-1)*nb_series_per_task+1):upper_bound]
+ })
+ # NOTE(review): this is a fragment of a larger function (a diff hunk); names such
+ # as WER, K1, K2, ntasks, ncores_tasks, nb_series_per_chunk, getCoefs, bin_dir,
+ # sep, nbytes, endian come from the enclosing (unseen) scope.
+ # One cluster worker per task; each worker runs a first-stage clustering.
+ cl = parallel::makeCluster(ncores_tasks)
+ # 1000*K1 indices [if WER=="end"], or empty vector [if WER=="mix"] --> series on file
+ indices = unlist( parallel::parLapply(cl, indices_tasks, function(inds) {
+ # Stage-1 clustering on this task's index range: yields K1 medoid indices.
+ indices_medoids = clusteringTask(inds,getCoefs,K1,nb_series_per_chunk,ncores_clust)
+ if (WER=="mix")
+ {
+ # Stage-2 clustering (K2 medoids) computed directly on the worker from the
+ # stage-1 medoid series.
+ medoids2 = computeClusters2(
+ getSeries(indices_medoids), K2, getSeries, nb_series_per_chunk)
+ # NOTE(review): synchrones_file is captured here by the closure but, within
+ # this hunk, it is only assigned below (after stopCluster). Either it is
+ # already defined earlier in the full file or this is a use-before-definition
+ # ordering bug -- confirm against the complete function. Also note serialize()
+ # here appears to be a project helper (file/chunk/sep/nbytes/endian signature),
+ # not base R serialize().
+ serialize(medoids2, synchrones_file, nb_series_per_chunk, sep, nbytes, endian)
+ # Medoids were written to file; return no indices in "mix" mode.
+ return (vector("integer",0))
+ }
+ indices_medoids
+ }) )
+ parallel::stopCluster(cl)
+ 
+ # NOTE(review): getRefSeries/synchrones_file are assigned only after the parallel
+ # section that (apparently) needs them -- verify ordering in the full file.
+ getRefSeries = getSeries
+ synchrones_file = paste(bin_dir,"synchrones",sep="") ; unlink(synchrones_file)
+ if (WER=="mix")
+ {
+ # In "mix" mode each of the ntasks workers wrote K2 synchrones to file, so the
+ # working set becomes ntasks*K2 series read back from synchrones_file.
+ indices = seq_len(ntasks*K2)
+ #Now series must be retrieved from synchrones_file
+ getSeries = function(inds) getDataInFile(inds, synchrones_file, nbytes, endian)
+ #Coefs must be re-computed
+ unlink(coefs_file)
+ index = 1
+ # Stream the synchrones in chunks of nb_series_per_chunk, recomputing their
+ # coefficients (via curvesToCoefs) and appending them to coefs_file; loop ends
+ # when getSeries returns NULL (past end of file).
+ repeat
+ {
+ series = getSeries((index-1)+seq_len(nb_series_per_chunk))
+ if (is.null(series))
+ break
+ coefs_chunk = curvesToCoefs(series, wf)
+ serialize(coefs_chunk, coefs_file, nb_series_per_chunk, sep, nbytes, endian)
+ index = index + nb_series_per_chunk
+ }