drop enercast submodule; drop Rcpp requirement; fix doc, complete code, fix fix fix
[epclust.git] / epclust / R / de_serialize.R
#' (De)Serialization of a [big]matrix or data stream
#'
#' \code{binarize()} serializes a matrix or CSV file into a binary file, with minimal
#' overhead. \code{getDataInFile()} achieves the inverse task: it retrieves (ASCII)
#' data rows from indices in the binary file. Finally, \code{binarizeTransform()}
#' serializes transformations of all data chunks; to use it, a data-retrieval function
#' must be provided -- thus \code{binarize} will most likely be used first
#' (and then a function defined to seek in the generated binary file); see the example below.
#'
#' @param data_ascii Either a matrix (data by columns), or a CSV file or connection (data by rows)
#' @param data_bin_file Name of the binary file: output of \code{binarize},
#' or input of \code{getDataInFile}
#' @param nb_per_chunk Number of lines to process in one batch (big.matrix or connection)
#' @param getData Function to retrieve data chunks
#' @param transform Transformation function to apply on data chunks
#' @param indices Indices of the lines to retrieve
#' @inheritParams claws
#'
#' @return For \code{getDataInFile()}, the matrix whose columns correspond to the
#' requested indices. \code{binarizeTransform} returns the number of processed lines,
#' while \code{binarize} is designed to serialize across several calls and thus returns nothing.
#'
#' @name de_serialize
#' @rdname de_serialize
#' @aliases binarize binarizeTransform getDataInFile
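#'
#' @examples
#' \dontrun{
#' # Minimal round-trip sketch (an illustration, not from the package tests): random
#' # series stored by columns, written to a temporary file; nbytes=8 is assumed here
#' # so that double values round-trip exactly.
#' series <- matrix(rnorm(50), nrow=10, ncol=5) #5 series of length 10, one per column
#' bin_file <- tempfile()
#' binarize(series, bin_file, nb_per_chunk=2, nbytes=8)
#' retrieved <- getDataInFile(c(1,3), bin_file, nbytes=8)
#' stopifnot(all.equal(retrieved, series[,c(1,3)]))
#' }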
NULL

#' @rdname de_serialize
#' @export
binarize <- function(data_ascii, data_bin_file, nb_per_chunk,
	sep=",", nbytes=4, endian=.Platform$endian)
{
	# data_ascii can be of two types: [big.]matrix, or connection
	if (is.character(data_ascii))
		data_ascii <- file(data_ascii, open="r")
	else if (methods::is(data_ascii,"connection") && !isOpen(data_ascii))
		open(data_ascii)
	is_matrix <- !methods::is(data_ascii,"connection")

	# At first call, the length of a stored row is written, so it's important to determine
	# whether the serialization process has already started.
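	# Resulting file layout, as implied by the writes below (offsets in bytes):
	#   [0, 8)                          : data_length, the number of values per row (integer)
	#   [8 + k*data_length*nbytes, ...) : values of the (k+1)-th row/series, 'nbytes' each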
	first_write <- (!file.exists(data_bin_file) || file.info(data_bin_file)$size == 0)

	# Open the binary file for writing (or in 'append' mode if it already exists)
	data_bin <- file(data_bin_file, open=ifelse(first_write,"wb","ab"))

	if (first_write)
	{
		# Write the data length on first call: this number of items is always stored on 8 bytes
		writeBin(0L, data_bin, size=8, endian=endian)
		if (is_matrix)
			data_length <- nrow(data_ascii)
		else #connection
		{
			# Read the first line to know the data length, and write it right away
			data_line <- scan(data_ascii, double(), sep=sep, nlines=1, quiet=TRUE)
			writeBin(data_line, data_bin, size=nbytes, endian=endian)
			data_length <- length(data_line)
		}
	}

	if (is_matrix)
	{
		# Data is processed by chunks; although this may not be so useful for a (normal) matrix
		# input, it could be for a file-backed big.matrix. It's easier to follow a unified pattern.
		index <- 1
	}
	repeat
	{
		if (is_matrix)
		{
			data_chunk <-
				if (index <= ncol(data_ascii))
					as.double(data_ascii[,index:min(ncol(data_ascii),index+nb_per_chunk-1)])
				else
					double(0)
			index <- index + nb_per_chunk
		}
		else #connection
			data_chunk <- scan(data_ascii, double(), sep=sep, nlines=nb_per_chunk, quiet=TRUE)

		# Data size is unknown in the case of a connection
		if (length(data_chunk)==0)
			break

		# Write this chunk of data to the binary file
		writeBin(data_chunk, data_bin, size=nbytes, endian=endian)
	}

	if (first_write)
	{
		# Write data_length == (file_size-8) / (nbytes*nbWritten) at offset 0 in data_bin
		ignored <- seek(data_bin, 0)
		writeBin(data_length, data_bin, size=8, endian=endian)
	}
	close(data_bin)

	if (!is_matrix)
		close(data_ascii)
}
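
# Note: when data_bin_file already exists and is non-empty, binarize() opens it in
# append mode, so successive calls accumulate series into the same binary file,
# e.g. (sketch, with hypothetical matrices chunk1/chunk2 having the same number of rows):
#   binarize(chunk1, "all.bin", nb_per_chunk=100, nbytes=8)
#   binarize(chunk2, "all.bin", nb_per_chunk=100, nbytes=8) #appended after chunk1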

#' @rdname de_serialize
#' @export
binarizeTransform <- function(getData, transform, data_bin_file, nb_per_chunk,
	nbytes=4, endian=.Platform$endian)
{
	nb_items <- 0 #side-effect: store the number of transformed items
	index <- 1
	repeat
	{
		# Retrieve a chunk of data from a binary file (generally obtained by binarize())
		data_chunk <- getData((index-1)+seq_len(nb_per_chunk))
		if (is.null(data_chunk))
			break

		# Apply the transformation on the current chunk (by columns)
		transformed_chunk <- transform(data_chunk)

		# Save the result in binary format
		binarize(transformed_chunk, data_bin_file, nb_per_chunk, ",", nbytes, endian)

		index <- index + nb_per_chunk
		nb_items <- nb_items + ncol(data_chunk)
	}
	nb_items #number of transformed items
}
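
# Usage sketch (hypothetical file names; 'getSeries' is a user-defined retrieval closure):
#   getSeries <- function(indices) getDataInFile(indices, "series.bin", nbytes=8)
#   binarizeTransform(getSeries, function(x) 2*x, "series_double.bin", 500, nbytes=8)
# getDataInFile() returns NULL once all requested indices are past the end of the file,
# which is what stops the repeat loop above.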

#' @rdname de_serialize
#' @export
getDataInFile <- function(indices, data_bin_file, nbytes=4, endian=.Platform$endian)
{
	data_bin <- file(data_bin_file, "rb") #source binary file

	data_size <- file.info(data_bin_file)$size #number of bytes in the file
	# data_length: length of a vector in the binary file (first element, on 8 bytes)
	data_length <- readBin(data_bin, "integer", n=1, size=8, endian=endian)

	# Seek all 'indices' columns in the binary file, using data_length and nbytes
	# to compute the offset ( index i starts at byte 8 + (i-1)*data_length*nbytes )
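	# (e.g. with nbytes=8 and data_length=10, the series at index i=3 starts at
	#  byte offset 8 + 2*10*8 = 168)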
	data_ascii <- do.call( cbind, lapply( indices, function(i) {
		offset <- 8+(i-1)*data_length*nbytes
		if (offset >= data_size)
			return (NULL)
		ignored <- seek(data_bin, offset) #position cursor at computed offset
		readBin(data_bin, "double", n=data_length, size=nbytes, endian=endian)
	} ) )
	close(data_bin)

	data_ascii #retrieved data, in columns
}