#' @include Forecaster.R
#'
#' Neighbors2 Forecaster
#'
#' Predict tomorrow as a weighted combination of "futures of the past" days.
#' Inherits \code{\link{Forecaster}}
#'
Neighbors2Forecaster = R6::R6Class("Neighbors2Forecaster",
	inherit = Forecaster,

	public = list(
		predictShape = function(data, today, memory, horizon, ...)
		{
			# (Re)initialize computed parameters
			private$.params <- list("weights"=NA, "indices"=NA, "window"=NA)

			# Do not forecast on days with NAs (TODO: softer condition...)
			if (any(is.na(data$getCenteredSerie(today))))
				return (NA)

			# Indices of no-NAs days followed by no-NAs tomorrows, restricted to
			# similar days; TODO: 45 = magic number
			fdays = intersect(
				getNoNA2(data, max(today-memory,1), today-1),
				getSimilarDaysIndices(today, limit=45, same_season=TRUE)
			)

			# Get optional args
			kernel = ifelse(hasArg("kernel"), list(...)$kernel, "Gauss") #or "Epan"
			if (hasArg(h_window))
			{
				return ( private$.predictShapeAux(data, fdays, today, horizon,
					list(...)$h_window, kernel, TRUE) )
			}

			# Function to optimize h : h |--> mean prediction error on the last 45 "similar" days
			errorOnLastNdays = function(h, kernel)
			{
				error = 0
				nb_jours = 0
				for (day in fdays)
				{
					# mix_strategy is never used here (simtype != "mix"), therefore left blank
					prediction = private$.predictShapeAux(data, fdays, day, horizon, h, kernel, FALSE)
					if (!is.na(prediction[1]))
					{
						nb_jours = nb_jours + 1
						error = error + mean((data$getSerie(day+1)[1:horizon] - prediction)^2)
					}
				}
				return (error / nb_jours)
			}

			# h: only for endo in this variation
			h_best = optimize(errorOnLastNdays, c(0,10), kernel=kernel)$minimum
			return (private$.predictShapeAux(data, fdays, today, horizon, h_best, kernel, TRUE))
		}
	),

	private = list(
		# Precondition: "today" is full (no NAs)
		.predictShapeAux = function(data, fdays, today, horizon, h, kernel, final_call)
		{
			fdays = fdays[ fdays < today ]
			# TODO: 3 = magic number
			if (length(fdays) < 3)
				return (NA)

			# ENDO: distances from the last observed day to past days
			distances2 = rep(NA, length(fdays))
			for (i in seq_along(fdays))
			{
				delta = data$getSerie(today) - data$getSerie(fdays[i])
				# Compute the distance only when no common value is NA
				# (TODO: softer condition, e.g. require only half of non-NA common values)
				if (!any(is.na(delta)))
					distances2[i] = mean(delta^2)
			}

			sd_dist = sd(distances2, na.rm=TRUE) #some distances may be NA
			if (sd_dist < .Machine$double.eps)
			{
#				warning("All computed distances are very close: stdev too small")
				sd_dist = 1 #mostly for tests... FIXME:
			}

			simils_endo =
				if (kernel=="Gauss")
					exp(-distances2/(sd_dist*h^2))
				else
				{
					# Epanechnikov
					u = 1 - distances2/(sd_dist*h^2)
					u[u < 0] = 0. #kernel weights must stay non-negative
					u
				}

			# EXOGENOUS: distance computations are enough
			# TODO: search among similar concentrations at this stage ?!
			M = matrix( nrow=1+length(fdays), ncol=1+length(data$getExo(today)) )
			M[1,] = c( data$getLevel(today), as.double(data$getExo(today)) )
			for (i in seq_along(fdays))
				M[i+1,] = c( data$getLevel(fdays[i]), as.double(data$getExo(fdays[i])) )

			sigma = cov(M) #NOTE: robust covariance is way too slow
			sigma_inv = solve(sigma) #TODO: use pseudo-inverse if needed?

			# Mahalanobis distances from the last observed day to past days
			distances2 = rep(NA, nrow(M)-1)
			for (i in 2:nrow(M))
			{
				delta = M[1,] - M[i,]
				distances2[i-1] = delta %*% sigma_inv %*% delta
			}

			ppv <- sort(distances2, index.return=TRUE)$ix[1:10] #TODO: 10 = magic number; ppv unused so far -- nearest neighbours for endo as well?
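
			# --- Added completion (assumption): 'simils_exo' and 'simtype' are referenced
			# below but never defined in this draft. A minimal completion weights the
			# Mahalanobis distances just computed with the same kernel; reusing the
			# endogenous window h is an assumption, since no exogenous window exists here,
			# and simtype defaults to "endo", consistent with h being optimized for the
			# endogenous kernel only.
			sd_dist_exo = sd(distances2)
			if (is.na(sd_dist_exo) || sd_dist_exo < .Machine$double.eps)
				sd_dist_exo = 1
			simils_exo =
				if (kernel=="Gauss")
					exp(-distances2/(sd_dist_exo*h^2))
				else
				{
					# Epanechnikov
					u = 1 - distances2/(sd_dist_exo*h^2)
					u[u < 0] = 0.
					u
				}
			simtype = "endo"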
			similarities =
				if (simtype == "exo")
					simils_exo
				else if (simtype == "endo")
					simils_endo
				else #mix
					simils_endo * simils_exo
			similarities[is.na(similarities)] = 0 #defensive: keep numerator consistent with na.rm below

			prediction = rep(0, horizon)
			for (i in seq_along(fdays))
				prediction = prediction + similarities[i] * data$getSerie(fdays[i]+1)[1:horizon]
			prediction = prediction / sum(similarities, na.rm=TRUE)

			if (final_call)
			{
				private$.params$weights <- similarities
				private$.params$indices <- fdays
				private$.params$window <- h
			}

			return (prediction)
		}
	)
)
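
# ---------------------------------------------------------------------------
# Standalone illustration (not part of the class, kept inside `if (FALSE)` so it
# never runs at package load): a minimal sketch of the kernel-weighted
# "futures of the past" combination performed by .predictShapeAux, on toy data.
# All numbers below are made up; run the body interactively to inspect it.
if (FALSE)
{
	set.seed(1)
	horizon = 5
	past = matrix(rnorm(10*horizon), nrow=10)      # 10 "similar" past days
	tomorrows = matrix(rnorm(10*horizon), nrow=10) # their observed next days
	today = rnorm(horizon)                         # last observed day

	# Squared L2 distances from today to each past day, then Gaussian weights
	distances2 = apply(past, 1, function(day) mean((today - day)^2))
	h = 1.5 # window; the class instead picks it via optimize() over past errors
	weights = exp(-distances2 / (sd(distances2) * h^2))

	# Prediction = weighted average of the past days' "tomorrows"
	prediction = colSums(weights * tomorrows) / sum(weights)
	print(prediction)
}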