progress in main.R
[epclust.git] / code / draft_R_pkg / R / main.R
index 695b928..6dca708 100644
@@ -30,7 +30,17 @@ epclust = function(data, K, nbSeriesPerChunk, writeTmp=ref_writeTmp, readTmp=ref
 
        #0) check arguments
        if (!is.data.frame(data) && !is.function(data))
-               tryCatch({dataCon = open(data)},
+               tryCatch(
+                       {
+                               if (is.character(data))
+                               {
+                                       dataCon = file(data, open="r")
+                               } else if (!isOpen(data))
+                               {
+                                       open(data)
+                                       dataCon = data
+                               }
+                       },
                        error="data should be a data.frame, a function or a valid connection")
        if (!is.integer(K) || K < 2)
                stop("K should be an integer greater or equal to 2")
@@ -44,25 +54,44 @@ epclust = function(data, K, nbSeriesPerChunk, writeTmp=ref_writeTmp, readTmp=ref
 
        #1) acquire data (process curves, get as coeffs)
        index = 1
-       nbCurves = nrow(data)
-       while (index < nbCurves)
+       nbCurves = 0
+       repeat
        {
                if (is.data.frame(data))
                {
                        #full data matrix
-                       writeTmp( getCoeffs( data[index:(min(index+nbSeriesPerChunk-1,nbCurves)),] ) )
+                       if (index <= nrow(data))
+                       {
+                               writeTmp( getCoeffs( data[index:(min(index+nbSeriesPerChunk-1,nrow(data))),] ) )
+                       } else
+                       {
+                               break
+                       }
                } else if (is.function(data))
                {
                        #custom user function to retrieve next n curves, probably to read from DB
-                       writeTmp( getCoeffs( data(index, nbSeriesPerChunk) ) )
+                       coeffs_chunk = getCoeffs( data(index, nbSeriesPerChunk) )
+                       if (!is.null(coeffs_chunk))
+                       {
+                               writeTmp(coeffs_chunk)
+                       } else
+                       {
+                               break
+                       }
                } else
                {
                        #incremental connection
                        #TODO: find a better way to parse than using a temp file
                        ascii_lines = readLines(dataCon, nbSeriesPerChunk)
-                       seriesChunkFile = ".tmp/seriesChunk"
-                       writeLines(ascii_lines, seriesChunkFile)
-                       writeTmp( getCoeffs( read.csv(seriesChunkFile) ) )
+                       if (length(ascii_lines) > 0)
+                       {
+                               seriesChunkFile = ".tmp/seriesChunk"
+                               writeLines(ascii_lines, seriesChunkFile)
+                               writeTmp( getCoeffs( read.csv(seriesChunkFile) ) )
+                       } else
+                       {
+                               break
+                       }
                }
                index = index + nbSeriesPerChunk
        }
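
The function branch above implies a small contract for a user-supplied data argument: called as data(index, nbSeriesPerChunk), it returns the next block of curves, or NULL once the source is exhausted. A hypothetical reader following that contract (file name and layout are illustrative only: one curve per row, no header):

	getNextCurves = function(index, nbSeriesPerChunk)
	{
		chunk = tryCatch(
			read.csv("curves.csv", header=FALSE, skip=index-1, nrows=nbSeriesPerChunk),
			error = function(e) NULL)   #skip beyond end of file: signal end of data
		if (is.null(chunk) || nrow(chunk) == 0) NULL else chunk
	}

	#would then be passed as: epclust(getNextCurves, K, nbSeriesPerChunk)
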
@@ -73,9 +102,11 @@ epclust = function(data, K, nbSeriesPerChunk, writeTmp=ref_writeTmp, readTmp=ref
        ncores = ifelse(is.integer(ncores), ncores, parallel::detectCores())
        cl = parallel::makeCluster(ncores)
        parallel::clusterExport(cl=cl, varlist=c("X", "Y", "K", "p"), envir=environment())
-       li = parallel::parLapply(cl, 1:B, getParamsAtIndex)
+       library(cluster)
+       li = parallel::parLapply(cl, 1:B, getParamsAtIndex)
 
-       #2) process coeffs (by nbSeriesPerChunk) and cluster in parallel (just launch async task, wait for them to complete, and re-do if necessary)
+       #2) process coeffs (by nbSeriesPerChunk) and cluster them in parallel
+       #TODO: be careful of writing to a new temp file, then flush initial one, then re-use it...
        repeat
        {
                completed = rep(FALSE, ............)
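
One possible shape for the per-chunk work dispatched through parLapply: a small function that reads one chunk of coefficients back (here via readTmp, whose exact signature is assumed) and keeps the K medoids found by cluster::pam. A sketch only, not the final design:

	#cluster one chunk of coefficients, keep its K representatives
	clusterChunk = function(i, K)
	{
		coeffs = readTmp(i)              #assumed: returns the i-th chunk of coefficients
		cluster::pam(coeffs, K)$medoids  #K medoid rows of this chunk
	}
	#li = parallel::parLapply(cl, 1:nbChunks, clusterChunk, K)  #nbChunks: hypothetical count
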
@@ -85,7 +116,7 @@ epclust = function(data, K, nbSeriesPerChunk, writeTmp=ref_writeTmp, readTmp=ref
                #C) flush tmp file (current parallel processes will write in it)
                #always check "complete" flag (array, as I did in MPI) to know if "slaves" finished
        }
-
+       #TODO: pam(x, k) on the assembled coefficients (see step 3 below)
        parallel::stopCluster(cl)
 
        #3) readTmp last results, apply PAM on it, and return medoids + identifiers
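
Step 3 then amounts to stacking the per-chunk medoids and running one last PAM to obtain the K global medoids and their identifiers; a sketch, assuming the chunk results li from above are matrices of coefficients:

	finalCoeffs = do.call(rbind, li)         #stack chunk medoids row-wise
	finalPam = cluster::pam(finalCoeffs, K)
	medoids = finalPam$medoids               #K global medoids (in coefficients space)
	ids = finalPam$id.med                    #row indices of the selected medoids
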
@@ -96,8 +127,3 @@ epclust = function(data, K, nbSeriesPerChunk, writeTmp=ref_writeTmp, readTmp=ref
                #from center curves, apply stage 2...
        }
 }
-
-getCoeffs = function(series)
-{
-       #... return wavelets coeffs : compute in parallel !
-}
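
The getCoeffs() stub removed above was meant to return wavelet coefficients. Purely as an illustration of what it could compute, assuming the wavelets package (filter choice and number of levels are arbitrary here):

	#illustration: one row of discrete wavelet coefficients per input series
	getCoeffs = function(series)
	{
		series = as.matrix(series)   #one series per row
		t( apply(series, 1, function(x)
			unlist( wavelets::dwt(as.numeric(x), filter="d4")@W ) ) )
	}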