#' @include defaults.R

#' @title Cluster power curves with PAM in parallel
#'
#' @description Groups electricity power curves (or any series of similar nature) by applying the
#'   PAM algorithm in parallel to chunks of size \code{nbSeriesPerChunk}
#'
#' @param data Access to the data, which can be of one of the three following types:
#' \itemize{
#'   \item data.frame: each line contains its ID in the first cell, and all the values afterwards
#'   \item connection: any R connection object (e.g. a file) providing lines as described above
#'   \item function: a custom way to retrieve the curves; it takes two arguments: the start index
#'     (start) and the number of curves (n); see the example in the package vignette.
#' }
#' @param K Number of clusters
#' @param nbSeriesPerChunk Number of series in each chunk
#' @param writeTmp Function to write temporary wavelets coefficients (plus identifiers);
#'   see defaults in defaults.R
#' @param readTmp Function to read temporary wavelets coefficients (see defaults.R)
#' @param WER "end" to apply stage 2 after stage 1 has iterated and finished, or "mix"
#'   to apply it after every stage 1 iteration
#' @param ncores Number of parallel processes; if NULL, use parallel::detectCores()
#'
#' @return A data.frame of the final medoid curves (identifiers + values)
epclust = function(data, K, nbSeriesPerChunk, writeTmp=ref_writeTmp, readTmp=ref_readTmp,
	WER="end", ncores=NULL)
{
	#TODO: setRefClass(...) to avoid copying data:
	#http://stackoverflow.com/questions/2603184/r-pass-by-reference

	#0) check arguments
	if (!is.data.frame(data) && !is.function(data))
	{
		tryCatch(
		{
			if (is.character(data))
				dataCon = file(data, open="r")
			else
			{
				if (!isOpen(data))
					open(data)
				dataCon = data
			}
		},
		error = function(e) stop("data should be a data.frame, a function or a valid connection"))
	}
	if (!is.numeric(K) || K!=round(K) || K < 2)
		stop("K should be an integer greater than or equal to 2")
	if (!is.numeric(nbSeriesPerChunk) || nbSeriesPerChunk!=round(nbSeriesPerChunk)
		|| nbSeriesPerChunk < K)
	{
		stop("nbSeriesPerChunk should be an integer greater than or equal to K")
	}
	if (!is.function(writeTmp) || !is.function(readTmp))
		stop("writeTmp and readTmp should be functions (see defaults.R)")
	if (WER!="end" && WER!="mix")
		stop("WER takes values in {'end','mix'}")
	#concerning ncores, any non-numeric value is treated as "use parallel::detectCores()"

	#1) acquire data (process curves, get as coeffs)
	index = 1
	nbCurves = 0
	repeat
	{
		if (is.data.frame(data))
		{
			#full data matrix
			if (index <= nrow(data))
				writeTmp( getCoeffs( data[index:(min(index+nbSeriesPerChunk-1,nrow(data))),] ) )
			else
				break
		}
		else if (is.function(data))
		{
			#custom user function to retrieve the next n curves, probably reading from a DB
			coeffs_chunk = getCoeffs( data(index, nbSeriesPerChunk) )
			if (!is.null(coeffs_chunk))
				writeTmp(coeffs_chunk)
			else
				break
		}
		else
		{
			#incremental connection
			#TODO: find a better way to parse than using a temp file
			ascii_lines = readLines(dataCon, nbSeriesPerChunk)
			if (length(ascii_lines) > 0)
			{
				seriesChunkFile = ".tmp/seriesChunk"
				writeLines(ascii_lines, seriesChunkFile)
				writeTmp( getCoeffs( read.csv(seriesChunkFile) ) )
			}
			else
				break
		}
		index = index + nbSeriesPerChunk
	}
	if (exists("dataCon"))
		close(dataCon)

	library(parallel)
	if (!is.numeric(ncores))
		ncores = parallel::detectCores()
	cl = parallel::makeCluster(ncores)
	#TODO: the objects to export are not defined yet (X, Y and p do not exist at this point)
	#parallel::clusterExport(cl=cl, varlist=c("X", "Y", "K", "p"), envir=environment())
	library(cluster)
	#li = parallel::parLapply(cl, 1:B, ) #TODO: B (number of tasks) and the worker function remain to be defined

	#2) process coeffs (by nbSeriesPerChunk) and cluster them in parallel
	#TODO: be careful about writing to a new temp file, then flushing the initial one, then re-using it...
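	#Illustrative sketch only (not yet part of the package): one possible shape for the per-chunk
	#worker that the parallel loop below could dispatch (e.g. through the parLapply call sketched
	#above). It assumes a chunk is a data.frame or matrix whose first column holds identifiers and
	#whose remaining columns hold wavelet coefficients; it runs PAM on the coefficients and returns
	#the K medoid rows, identifiers included.
	clusterOneChunk_sketch = function(chunk, K)
	{
		fit = cluster::pam(chunk[,-1], K)
		chunk[fit$id.med, , drop=FALSE]
	}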
	repeat
	{
		#completed = rep(FALSE, ............) #TODO: the number of pending tasks remains to be determined
		#while there are jobs to do (i.e. the size of the tmp "file" is greater than nbSeriesPerChunk):
		#A) determine which tasks each processor will do (OK)
		#B) send each (set of) tasks in parallel
		#C) flush the tmp file (the current parallel processes will write into it)
		#always check the "completed" flags (an array, as in MPI) to know whether the "slaves" have finished
		break #TODO: placeholder until the parallel loop described above is implemented
	}
	#pam(x, k) #TODO: inputs still to be defined; see step 3 below
	parallel::stopCluster(cl)

	#3) readTmp the last results, apply PAM on them, and return medoids + identifiers

	#4) apply stage 2 (in parallel? inside task 2?)
	if (WER == "end")
	{
		#from the center (medoid) curves, apply stage 2...
	}
}
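#Illustrative usage (a sketch only, not a tested example): the 'series' object below is simulated
#data and 'getBatch' is a hypothetical helper; getCoeffs and the ref_writeTmp/ref_readTmp defaults
#must be available from the package for these calls to run.
#
#  series = data.frame(id=1:100, matrix(rnorm(100*256), nrow=100))
#  medoids = epclust(series, K=5, nbSeriesPerChunk=20)
#
#  #or with a custom retrieval function taking a start index and a number of curves:
#  getBatch = function(start, n)
#    if (start <= nrow(series)) series[start:min(start+n-1, nrow(series)),] else NULL
#  medoids = epclust(getBatch, K=5, nbSeriesPerChunk=20)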