X-Git-Url: https://git.auder.net/?a=blobdiff_plain;f=pkg%2FR%2FF_Neighbors.R;h=02536ebf164c9634c6bb7a6f23eeb3cf27fbbea9;hb=638f27f4296727aff62b56643beb9f42aa5b57ef;hp=ffb6d371df57a910451f0a79ffeed340886132ff;hpb=99f83c9af27492f6fb9b10f51fb8704ed588f5c1;p=talweg.git

diff --git a/pkg/R/F_Neighbors.R b/pkg/R/F_Neighbors.R
index ffb6d37..02536eb 100644
--- a/pkg/R/F_Neighbors.R
+++ b/pkg/R/F_Neighbors.R
@@ -1,259 +1,311 @@
-#' @include Forecaster.R
+#' Neighbors Forecaster
 #'
-#' @title Neighbors Forecaster
+#' Predict the next series as a weighted combination of "futures of the past" days,
+#' where days in the past are chosen and weighted according to some similarity measures.
 #'
-#' @description Predict tomorrow as a weighted combination of "futures of the past" days.
-#' Inherits \code{\link{Forecaster}}
-NeighborsForecaster = setRefClass(
-	Class = "NeighborsForecaster",
-	contains = "Forecaster",
+#' The main method is \code{predictShape()}, taking arguments data, today, memory,
+#' predict_from, horizon, respectively for the dataset (object output of
+#' \code{getData()}), the current index, the data depth (in days), the first predicted
+#' hour and the last predicted hour.
+#' In addition, optional arguments can be passed:
+#' \itemize{
+#'   \item local : TRUE (default) to constrain neighbors to be "same days within same
+#'     season"
+#'   \item simtype : 'endo' for a similarity based on the series only,
+#'     'exo' for a similarity based on exogenous variables only,
+#'     'mix' for the product of 'endo' and 'exo',
+#'     'none' (default) to apply a simple average: no computed weights
+#'   \item window : A window for similarity computations; overrides the cross-validation
+#'     window estimation.
+#' }
+#' The method is summarized as follows:
+#' \enumerate{
+#'   \item Determine N (=20) recent days without missing values, each followed by a
+#'     "tomorrow" also without missing values.
+#'   \item Optimize the window parameters (if relevant) on the N chosen days.
+#'   \item Considering the optimized window, compute the neighbors (with locality
+#'     constraint or not), compute their similarities -- using a Gaussian kernel if
+#'     simtype != "none" -- and accordingly average the "tomorrows of the neighbors" to
+#'     obtain the final prediction.
+#' }
+#'
+#' @usage # NeighborsForecaster$new(pjump)
+#'
+#' @docType class
+#' @format R6 class, inherits Forecaster
+#' @aliases F_Neighbors
+#'
+NeighborsForecaster = R6::R6Class("NeighborsForecaster",
+	inherit = Forecaster,
 
-	methods = list(
-		initialize = function(...)
-		{
-			callSuper(...)
-		},
-		predictShape = function(today, memory, horizon, ...)
+	public = list(
+		predictShape = function(data, today, memory, predict_from, horizon, ...)
 		{
 			# (re)initialize computed parameters
-			params <<- list("weights"=NA, "indices"=NA, "window"=NA)
+			private$.params <- list("weights"=NA, "indices"=NA, "window"=NA)
 
-			first_day = max(today - memory, 1)
-			# The first day is generally not complete:
-			if (length(data$getCenteredSerie(1)) < length(data$getCenteredSerie(2)))
-				first_day = 2
-
-			# Predict only on (almost) non-NAs days
-			nas_in_serie = is.na(data$getSerie(today))
-			if (any(nas_in_serie))
+			# Do not forecast on days with NAs (TODO: softer condition...)
+			if (any(is.na(data$getSerie(today-1))) ||
+					(predict_from>=2 && any(is.na(data$getSerie(today)[1:(predict_from-1)]))))
 			{
-				#TODO: better define "repairing" conditions (and method)
-				if (sum(nas_in_serie) >= length(nas_in_serie) / 2)
-					return (NA)
-				for (i in seq_along(nas_in_serie))
-				{
-					if (nas_in_serie[i])
-					{
-						#look left
-						left = i-1
-						while (left>=1 && nas_in_serie[left])
-							left = left-1
-						#look right
-						right = i+1
-						while (right<=length(nas_in_serie) && nas_in_serie[right])
-							right = right+1
-						#HACK: modify by-reference Data object...
-						data$data[[today]]$serie[i] <<-
-							if (left==0) data$data[[today]]$serie[right]
-							else if (right==0) data$data[[today]]$serie[left]
-							else (data$data[[today]]$serie[left] + data$data[[today]]$serie[right]) / 2.
-					}
-				}
+				return (NA)
 			}
 
-			# Determine indices of no-NAs days followed by no-NAs tomorrows
-			fdays_indices = c()
-			for (i in first_day:(today-1))
+			# Get optional args
+			local = ifelse(hasArg("local"), list(...)$local, TRUE) #same level + season?
+			simtype = ifelse(hasArg("simtype"), list(...)$simtype, "none") #or "endo", or "exo"
+			opera = ifelse(hasArg("opera"), list(...)$opera, FALSE) #operational mode?
+
+			# Determine indices of no-NAs days preceded by no-NAs yesterdays
+			tdays = .getNoNA2(data, max(today-memory,2), ifelse(opera,today-1,data$getSize()))
+			if (!opera)
+				tdays = setdiff(tdays, today) #always exclude current day
+
+			# Shortcut if window is known
+			if (hasArg("window"))
 			{
-				if ( !any(is.na(data$getSerie(i)) | is.na(data$getSerie(i+1))) )
-					fdays_indices = c(fdays_indices, i)
+				return ( private$.predictShapeAux(data, tdays, today, predict_from, horizon,
+					local, list(...)$window, simtype, opera, TRUE) )
 			}
 
-			#GET OPTIONAL PARAMS
-			# Similarity computed with exogenous variables ? endogenous ? both ? ("exo","endo","mix")
-			simtype = ifelse(hasArg("simtype"), list(...)$simtype, "mix")
-			simthresh = ifelse(hasArg("simthresh"), list(...)$simthresh, 0.)
- kernel = ifelse(hasArg("kernel"), list(...)$kernel, "Gauss") #or "Epan" - mix_strategy = ifelse(hasArg("mix_strategy"), list(...)$mix_strategy, "mult") #or "neighb" - same_season = ifelse(hasArg("same_season"), list(...)$same_season, FALSE) - if (hasArg(h_window)) - return (.predictShapeAux(fdays_indices, today, horizon, list(...)$h_window, kernel, - simtype, simthresh, mix_strategy, TRUE)) - #END GET - - # Indices for cross-validation; TODO: 45 = magic number - indices = getSimilarDaysIndices(today, limit=45, same_season=same_season) - if (tail(indices,1) == 1) - indices = head(indices,-1) + # Indices of similar days for cross-validation; TODO: 20 = magic number + cv_days = getSimilarDaysIndices(today, data, limit=20, same_season=FALSE, + days_in=tdays, operational=opera) - # Function to optimize h : h |--> sum of prediction errors on last 45 "similar" days - errorOnLastNdays = function(h, kernel, simtype) + # Optimize h : h |--> sum of prediction errors on last N "similar" days + errorOnLastNdays = function(window, simtype) { error = 0 nb_jours = 0 - for (i in indices) + for (i in seq_along(cv_days)) { - # NOTE: predict only on non-NAs days followed by non-NAs (TODO:) - if (!any(is.na(data$getSerie(i)) | is.na(data$getSerie(i+1)))) + # mix_strategy is never used here (simtype != "mix"), therefore left blank + prediction = private$.predictShapeAux(data, tdays, cv_days[i], predict_from, + horizon, local, window, simtype, opera, FALSE) + if (!is.na(prediction[1])) { nb_jours = nb_jours + 1 - # mix_strategy is never used here (simtype != "mix"), therefore left blank - prediction = .predictShapeAux(fdays_indices, i, horizon, h, kernel, simtype, - simthresh, "", FALSE) - if (!is.na(prediction[1])) - error = error + mean((data$getCenteredSerie(i+1)[1:horizon] - prediction)^2) + error = error + + mean((data$getSerie(cv_days[i])[predict_from:horizon] - prediction)^2) } } return (error / nb_jours) } - h_best_exo = 1. - if (simtype != "endo" && !(simtype=="mix" && mix_strategy=="neighb")) + # TODO: 7 == magic number + if (simtype=="endo" || simtype=="mix") { - h_best_exo = optimize(errorOnLastNdays, interval=c(0,10), kernel=kernel, - simtype="exo")$minimum + best_window_endo = optimize( + errorOnLastNdays, c(0,7), simtype="endo")$minimum } - if (simtype != "exo") + if (simtype=="exo" || simtype=="mix") { - h_best_endo = optimize(errorOnLastNdays, interval=c(0,10), kernel=kernel, - simtype="endo")$minimum + best_window_exo = optimize( + errorOnLastNdays, c(0,7), simtype="exo")$minimum } - if (simtype == "endo") - { - return (.predictShapeAux(fdays_indices, today, horizon, h_best_endo, kernel, "endo", - simthresh, "", TRUE)) - } - if (simtype == "exo") - { - return (.predictShapeAux(fdays_indices, today, horizon, h_best_exo, kernel, "exo", - simthresh, "", TRUE)) - } - if (simtype == "mix") - { - return (.predictShapeAux(fdays_indices, today, horizon, c(h_best_endo,h_best_exo), - kernel, "mix", simthresh, mix_strategy, TRUE)) - } - }, - # Precondition: "today" is full (no NAs) - .predictShapeAux = function(fdays_indices, today, horizon, h, kernel, simtype, simthresh, - mix_strategy, final_call) - { - dat = data$data #HACK: faster this way... 
+			best_window =
+				if (simtype == "endo")
+					best_window_endo
+				else if (simtype == "exo")
+					best_window_exo
+				else if (simtype == "mix")
+					c(best_window_endo,best_window_exo)
+				else #none: value doesn't matter
+					1
 
-			fdays_indices = fdays_indices[fdays_indices < today]
-			# TODO: 3 = magic number
-			if (length(fdays_indices) < 3)
+			return( private$.predictShapeAux(data, tdays, today, predict_from, horizon, local,
+				best_window, simtype, opera, TRUE) )
+		}
+	),
+	private = list(
+		# Precondition: "yesterday until predict_from-1" is full (no NAs)
+		.predictShapeAux = function(data, tdays, today, predict_from, horizon, local, window,
+			simtype, opera, final_call)
+		{
+			tdays_cut = tdays[ tdays != today ]
+			if (length(tdays_cut) == 0)
 				return (NA)
 
-			if (simtype != "exo")
+			if (local)
 			{
-				h_endo = ifelse(simtype=="mix", h[1], h)
-
-				# Distances from last observed day to days in the past
-				distances2 = rep(NA, length(fdays_indices))
-				for (i in seq_along(fdays_indices))
+				# TODO: 60 == magic number
+				tdays = getSimilarDaysIndices(today, data, limit=60, same_season=TRUE,
+					days_in=tdays_cut, operational=opera)
+#				if (length(tdays) <= 1)
+#					return (NA)
+				# TODO: 10 == magic number
+				tdays = .getConstrainedNeighbs(today, data, tdays, min_neighbs=10)
+				if (length(tdays) == 1)
 				{
-					delta = dat[[today]]$serie - dat[[ fdays_indices[i] ]]$serie
-					# Require at least half of non-NA common values to compute the distance
-					if (sum(is.na(delta)) <= 0) #length(delta)/2)
-						distances2[i] = mean(delta^2) #, na.rm=TRUE)
-				}
-
-				sd_dist = sd(distances2)
-				if (sd_dist < .Machine$double.eps)
-					sd_dist = 1 #mostly for tests... FIXME:
-				simils_endo =
-					if (kernel=="Gauss")
-						exp(-distances2/(sd_dist*h_endo^2))
-					else { #Epanechnikov
-						u = 1 - distances2/(sd_dist*h_endo^2)
-						u[abs(u)>1] = 0.
-						u
+					if (final_call)
+					{
+						private$.params$weights <- 1
+						private$.params$indices <- tdays
+						private$.params$window <- 1
 					}
+					return ( data$getSerie(tdays[1])[predict_from:horizon] )
+				}
 			}
+			else
+				tdays = tdays_cut #no conditioning
 
-			if (simtype != "endo")
+			if (simtype == "endo" || simtype == "mix")
 			{
-				h_exo = ifelse(simtype=="mix", h[2], h)
-
-				M = matrix( nrow=1+length(fdays_indices), ncol=1+length(dat[[today]]$exo) )
-				M[1,] = c( dat[[today]]$level, as.double(dat[[today]]$exo) )
-				for (i in seq_along(fdays_indices))
-				{
-					M[i+1,] = c( dat[[ fdays_indices[i] ]]$level,
-						as.double(dat[[ fdays_indices[i] ]]$exo) )
-				}
+				# Compute endogenous similarities using the given window
+				window_endo = ifelse(simtype=="mix", window[1], window)
 
-				sigma = cov(M) #NOTE: robust covariance is way too slow
-				sigma_inv = solve(sigma) #TODO: use pseudo-inverse if needed?
+				# Distances from last observed day to selected days in the past
+				distances2 <- .computeDistsEndo(data, today, tdays, predict_from)
 
-				# Distances from last observed day to days in the past
-				distances2 = rep(NA, nrow(M)-1)
-				for (i in 2:nrow(M))
+				if (local)
 				{
-					delta = M[1,] - M[i,]
-					distances2[i-1] = delta %*% sigma_inv %*% delta
+					max_neighbs = 12 #TODO: 12 = arbitrary number
+					if (length(distances2) > max_neighbs)
+					{
+						ordering <- order(distances2)
+						tdays <- tdays[ ordering[1:max_neighbs] ]
+						distances2 <- distances2[ ordering[1:max_neighbs] ]
+					}
 				}
 
-				sd_dist = sd(distances2)
-				simils_exo =
-					if (kernel=="Gauss") {
-						exp(-distances2/(sd_dist*h_exo^2))
-					} else { #Epanechnikov
-						u = 1 - distances2/(sd_dist*h_exo^2)
-						u[abs(u)>1] = 0.
- u - } + simils_endo <- .computeSimils(distances2, window_endo) } - if (simtype=="mix") + if (simtype == "exo" || simtype == "mix") { - if (mix_strategy == "neighb") - { - #Only (60) most similar days according to exogen variables are kept into consideration - #TODO: 60 = magic number - keep_indices = sort(simils_exo, index.return=TRUE)$ix[1:(min(60,length(simils_exo)))] - simils_endo[-keep_indices] = 0. - } - else #mix_strategy == "mult" - simils_endo = simils_endo * simils_exo + # Compute exogen similarities using given window + window_exo = ifelse(simtype=="mix", window[2], window) + + distances2 <- .computeDistsExo(data, today, tdays) + + simils_exo <- .computeSimils(distances2, window_exo) } similarities = - if (simtype != "exo") { - simils_endo - } else { + if (simtype == "exo") simils_exo - } + else if (simtype == "endo") + simils_endo + else if (simtype == "mix") + simils_endo * simils_exo + else #none + rep(1, length(tdays)) + similarities = similarities / sum(similarities) - if (simthresh > 0.) + prediction = rep(0, horizon-predict_from+1) + for (i in seq_along(tdays)) { - max_sim = max(similarities) - # Set to 0 all similarities s where s / max_sim < simthresh, but keep at least 60 - ordering = sort(similarities / max_sim, index.return=TRUE) - if (ordering[60] < simthresh) - { - similarities[ ordering$ix[ - (1:60) ] ] = 0. - } else - { - limit = 61 - while (limit < length(similarities) && ordering[limit] >= simthresh) - limit = limit + 1 - similarities[ ordering$ix[ - 1:limit] ] = 0. - } + prediction = prediction + + similarities[i] * data$getSerie(tdays[i])[predict_from:horizon] } - prediction = rep(0, horizon) - for (i in seq_along(fdays_indices)) - prediction = prediction + similarities[i] * dat[[ fdays_indices[i]+1 ]]$serie[1:horizon] - prediction = prediction / sum(similarities, na.rm=TRUE) - if (final_call) { - params$weights <<- similarities - params$indices <<- fdays_indices - params$window <<- - if (simtype=="endo") { - h_endo - } else if (simtype=="exo") { - h_exo - } else { - c(h_endo,h_exo) - } + private$.params$weights <- similarities + private$.params$indices <- tdays + private$.params$window <- + if (simtype=="endo") + window_endo + else if (simtype=="exo") + window_exo + else if (simtype=="mix") + c(window_endo,window_exo) + else #none + 1 } return (prediction) } ) ) + +# getConstrainedNeighbs +# +# Get indices of neighbors of similar pollution level (among same season + day type). 
+#
+# @param today Index of current day
+# @param data Object of class Data
+# @param tdays Current set of "second days" (no-NA pairs)
+# @param min_neighbs Minimum number of points in a neighborhood
+#
+.getConstrainedNeighbs = function(today, data, tdays, min_neighbs=10)
+{
+	levelToday = data$getLevelHat(today)
+#	levelYersteday = data$getLevel(today-1)
+	distances = sapply(tdays, function(i) {
+#		sqrt((data$getLevel(i-1)-levelYersteday)^2 + (data$getLevel(i)-levelToday)^2)
+		abs(data$getLevel(i)-levelToday)
+	})
+	#TODO: 1, +1, +3 : magic numbers
+	dist_thresh = 1
+	min_neighbs = min(min_neighbs,length(tdays))
+	repeat
+	{
+		same_pollution = (distances <= dist_thresh)
+		nb_neighbs = sum(same_pollution)
+		if (nb_neighbs >= min_neighbs) #will eventually happen
+			break
+		dist_thresh = dist_thresh + ifelse(dist_thresh>1,3,1)
+	}
+	tdays = tdays[same_pollution]
+#	max_neighbs = 12
+#	if (nb_neighbs > max_neighbs)
+#	{
+#		# Keep only max_neighbs closest neighbors
+#		tdays = tdays[ order(distances[same_pollution])[1:max_neighbs] ]
+#	}
+	tdays
+}
+
+# computeSimils
+#
+# Apply the Gaussian kernel to the computed squared distances.
+#
+# @param distances2 Squared distances
+# @param window Window parameter for the kernel
+#
+.computeSimils <- function(distances2, window)
+{
+	sd_dist = sd(distances2)
+	if (sd_dist < .25 * sqrt(.Machine$double.eps))
+	{
+#		warning("All computed distances are very close: stdev too small")
+		sd_dist = 1 #mostly for tests... FIXME:
+	}
+	exp(-distances2/(sd_dist*window^2))
+}
+
+.computeDistsEndo <- function(data, today, tdays, predict_from)
+{
+	lastSerie = c( data$getSerie(today-1),
+		data$getSerie(today)[if (predict_from>=2) 1:(predict_from-1) else c()] )
+	sapply(tdays, function(i) {
+		delta = lastSerie - c(data$getSerie(i-1),
+			data$getSerie(i)[if (predict_from>=2) 1:(predict_from-1) else c()])
+		sqrt(mean(delta^2))
+	})
+}
+
+.computeDistsExo <- function(data, today, tdays)
+{
+	M = matrix( ncol=1+length(tdays), nrow=1+length(data$getExo(1)) )
+	M[,1] = c( data$getLevelHat(today), as.double(data$getExoHat(today)) )
+	for (i in seq_along(tdays))
+		M[,i+1] = c( data$getLevel(tdays[i]), as.double(data$getExo(tdays[i])) )
+
+	sigma = cov(t(M)) #NOTE: robust covariance is way too slow
+	# TODO: 10 == magic number; more robust way == det, or always ginv()
+	sigma_inv =
+		if (length(tdays) > 10)
+			solve(sigma)
+		else
+			MASS::ginv(sigma)
+
+	# Distances from last observed day to days in the past
+	sapply(seq_along(tdays), function(i) {
+		delta = M[,1] - M[,i+1]
+		delta %*% sigma_inv %*% delta
+	})
+}
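
A minimal usage sketch of the class added in this diff, assuming the surrounding talweg package: getData() (its arguments are elided here) is the function producing the data object described in the roxygen header, pjump is the jump-prediction function expected by the Forecaster constructor, and the numeric values (today=200, memory=Inf, predict_from=8, horizon=24) are illustrative placeholders rather than values taken from this file.

#NOT RUN -- illustrative sketch only; getData() arguments and pjump depend on the package setup
data = getData(...)                     #dataset object, as documented in the header above
nf = NeighborsForecaster$new(pjump)     #constructor, as in the @usage field
#forecast hours 8..24 of day 200 from local (same-season) neighbors,
#weighted by the product of endogenous and exogenous similarities:
pred = nf$predictShape(data, today=200, memory=Inf, predict_from=8, horizon=24,
	local=TRUE, simtype="mix")
#simtype="none" (the default) would return a plain average of the neighbors' "tomorrows";
#passing window=... skips the cross-validation window optimization.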