From: Benjamin Auder Date: Tue, 28 Mar 2017 15:35:39 +0000 (+0200) Subject: first tests for Neighbors2 after debug; TODO: some missing forecasts X-Git-Url: https://git.auder.net/js/pieces/css/parser.js?a=commitdiff_plain;h=5e838b3e17465c376ca075b766cf2543c82e9864;p=talweg.git first tests for Neighbors2 after debug; TODO: some missing forecasts --- diff --git a/pkg/DESCRIPTION b/pkg/DESCRIPTION index b932d77..8d3c4c3 100644 --- a/pkg/DESCRIPTION +++ b/pkg/DESCRIPTION @@ -1,10 +1,10 @@ Package: talweg Title: Time-Series Samples Forecasted With Exogenous Variables Version: 0.1-0 -Description: Forecast a curve sampled within the day (seconds, minutes, hours...), - using past measured curves + paste exogenous informations, - which could be some aggregated measure on the past curves, the weather... - Main starting point: computeForecast(). +Description: Forecast a curve sampled within the day (seconds, minutes, + hours...), using past measured curves + paste exogenous informations, which + could be some aggregated measure on the past curves, the weather... Main + starting point: computeForecast(). Author: Benjamin Auder [aut,cre], Jean-Michel Poggi [ctb], Bruno Portier , [ctb] @@ -22,13 +22,14 @@ Suggests: LazyData: yes URL: http://git.auder.net/?p=talweg.git License: MIT + file LICENSE -RoxygenNote: 6.0.1 -Collate: +RoxygenNote: 5.0.1 +Collate: 'A_NAMESPACE.R' 'Data.R' 'Forecaster.R' 'F_Average.R' 'F_Neighbors.R' + 'F_Neighbors2.R' 'F_Persistence.R' 'F_Zero.R' 'Forecast.R' diff --git a/pkg/R/F_Neighbors.R b/pkg/R/F_Neighbors.R index 600c5c8..5b2c899 100644 --- a/pkg/R/F_Neighbors.R +++ b/pkg/R/F_Neighbors.R @@ -33,21 +33,25 @@ NeighborsForecaster = R6::R6Class("NeighborsForecaster", # Indices of similar days for cross-validation; TODO: 45 = magic number sdays = getSimilarDaysIndices(today, limit=45, same_season=FALSE) + cv_days = intersect(fdays,sdays) + # Limit to 20 most recent matching days (TODO: 20 == magic number) + cv_days = sort(cv_days,decreasing=TRUE)[1:min(20,length(cv_days))] + # Function to optimize h : h |--> sum of prediction errors on last 45 "similar" days errorOnLastNdays = function(h, kernel, simtype) { error = 0 nb_jours = 0 - for (i in intersect(fdays,sdays)) + for (i in seq_along(cv_days)) { # mix_strategy is never used here (simtype != "mix"), therefore left blank prediction = private$.predictShapeAux(data, - fdays, i, horizon, h, kernel, simtype, FALSE) + fdays, cv_days[i], horizon, h, kernel, simtype, FALSE) if (!is.na(prediction[1])) { nb_jours = nb_jours + 1 error = error + - mean((data$getCenteredSerie(i+1)[1:horizon] - prediction)^2) + mean((data$getCenteredSerie(cv_days[i]+1)[1:horizon] - prediction)^2) } } return (error / nb_jours) @@ -96,14 +100,11 @@ NeighborsForecaster = R6::R6Class("NeighborsForecaster", h_endo = ifelse(simtype=="mix", h[1], h) # Distances from last observed day to days in the past - distances2 = rep(NA, length(fdays)) - for (i in seq_along(fdays)) - { - delta = data$getCenteredSerie(today) - data$getCenteredSerie(fdays[i]) - # Require at least half of non-NA common values to compute the distance - if ( !any( is.na(delta) ) ) - distances2[i] = mean(delta^2) - Centered} + serieToday = data$getSerie(today) + distances2 = sapply(fdays, function(i) { + delta = serieToday - data$getSerie(i) + mean(delta^2) + }) sd_dist = sd(distances2) if (sd_dist < .Machine$double.eps) @@ -136,12 +137,10 @@ NeighborsForecaster = R6::R6Class("NeighborsForecaster", sigma_inv = solve(sigma) #TODO: use pseudo-inverse if needed? 
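# [Editor's aside, not part of the patch] The TODO above asks whether a
# pseudo-inverse is needed. Below is a minimal, self-contained sketch of the
# Mahalanobis-type distance computed in this hunk, with MASS::ginv() as a
# drop-in replacement for solve() when cov(M) is singular or ill-conditioned.
# The matrix M and its dimensions are illustrative, not taken from the package.
library(MASS)
set.seed(1)
M = matrix(rnorm(20*4), nrow=20, ncol=4)  # row 1 = today, rows 2..20 = candidate past days
sigma_inv = ginv(cov(M))                  # pseudo-inverse; equals solve(cov(M)) when invertible
distances2 = sapply(2:nrow(M), function(i) {
	delta = M[1,] - M[i,]
	as.numeric(delta %*% sigma_inv %*% delta)  # squared Mahalanobis-type distance to past day i
})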
# Distances from last observed day to days in the past - distances2 = rep(NA, nrow(M)-1) - for (i in 2:nrow(M)) - { - delta = M[1,] - M[i,] - distances2[i-1] = delta %*% sigma_inv %*% delta - } + distances2 = sapply(seq_along(fdays), function(i) { + delta = M[1,] - M[i+1,] + delta %*% sigma_inv %*% delta + }) sd_dist = sd(distances2) if (sd_dist < .Machine$double.eps) diff --git a/pkg/R/F_Neighbors2.R b/pkg/R/F_Neighbors2.R index 7267661..fb63e40 100644 --- a/pkg/R/F_Neighbors2.R +++ b/pkg/R/F_Neighbors2.R @@ -9,6 +9,15 @@ Neighbors2Forecaster = R6::R6Class("Neighbors2Forecaster", inherit = Forecaster, public = list( + predictSerie = function(data, today, memory, horizon, ...) + { + # Parameters (potentially) computed during shape prediction stage + predicted_shape = self$predictShape(data, today, memory, horizon, ...) +# predicted_delta = private$.pjump(data,today,memory,horizon,private$.params,...) + # Predicted shape is aligned it on the end of current day + jump +# predicted_shape+tail(data$getSerie(today),1)-predicted_shape[1]+predicted_delta + predicted_shape + }, predictShape = function(data, today, memory, horizon, ...) { # (re)initialize computed parameters @@ -22,43 +31,73 @@ Neighbors2Forecaster = R6::R6Class("Neighbors2Forecaster", fdays = getNoNA2(data, max(today-memory,1), today-1) # Get optional args + simtype = ifelse(hasArg("simtype"), list(...)$simtype, "mix") #or "endo", or "exo" kernel = ifelse(hasArg("kernel"), list(...)$kernel, "Gauss") #or "Epan" if (hasArg(h_window)) { return ( private$.predictShapeAux(data, - fdays, today, horizon, list(...)$h_window, kernel, TRUE) ) + fdays, today, horizon, list(...)$h_window, kernel, simtype, TRUE) ) } # Indices of similar days for cross-validation; TODO: 45 = magic number sdays = getSimilarDaysIndices(today, limit=45, same_season=FALSE) + cv_days = intersect(fdays,sdays) + # Limit to 20 most recent matching days (TODO: 20 == magic number) + cv_days = sort(cv_days,decreasing=TRUE)[1:min(20,length(cv_days))] + # Function to optimize h : h |--> sum of prediction errors on last 45 "similar" days - errorOnLastNdays = function(h, kernel) + errorOnLastNdays = function(h, kernel, simtype) { error = 0 nb_jours = 0 - for (day in intersect(fdays,sdays)) + for (i in seq_along(cv_days)) { # mix_strategy is never used here (simtype != "mix"), therefore left blank - prediction = private$.predictShapeAux(data,fdays,day,horizon,h,kernel,FALSE) + prediction = private$.predictShapeAux(data, + fdays, cv_days[i], horizon, h, kernel, simtype, FALSE) if (!is.na(prediction[1])) { nb_jours = nb_jours + 1 error = error + - mean((data$getSerie(i+1)[1:horizon] - prediction)^2) + mean((data$getSerie(cv_days[i]+1)[1:horizon] - prediction)^2) } } return (error / nb_jours) } - # h :: only for endo in this variation - h_best = optimize(errorOnLastNdays, c(0,7), kernel=kernel)$minimum - return (private$.predictShapeAux(data,fdays,today,horizon,h_best,kernel,TRUE)) + if (simtype != "endo") + { + h_best_exo = optimize( + errorOnLastNdays, c(0,10), kernel=kernel, simtype="exo")$minimum + } + if (simtype != "exo") + { + h_best_endo = optimize( + errorOnLastNdays, c(0,10), kernel=kernel, simtype="endo")$minimum + } + + if (simtype == "endo") + { + return (private$.predictShapeAux(data, + fdays, today, horizon, h_best_endo, kernel, "endo", TRUE)) + } + if (simtype == "exo") + { + return (private$.predictShapeAux(data, + fdays, today, horizon, h_best_exo, kernel, "exo", TRUE)) + } + if (simtype == "mix") + { + h_best_mix = c(h_best_endo,h_best_exo) + 
return(private$.predictShapeAux(data, + fdays, today, horizon, h_best_mix, kernel, "mix", TRUE)) + } } ), private = list( # Precondition: "today" is full (no NAs) - .predictShapeAux = function(data, fdays, today, horizon, h, kernel, final_call) + .predictShapeAux = function(data, fdays, today, horizon, h, kernel, simtype, final_call) { fdays = fdays[ fdays < today ] # TODO: 3 = magic number @@ -69,7 +108,7 @@ Neighbors2Forecaster = R6::R6Class("Neighbors2Forecaster", sdays = getSimilarDaysIndices(today, limit=45, same_season=TRUE, data) indices = intersect(fdays,sdays) levelToday = data$getLevel(today) - distances = sapply(seq_along(indices), function(i) abs(data$getLevel(i)-levelToday)) + distances = sapply(indices, function(i) abs(data$getLevel(i)-levelToday)) same_pollution = (distances <= 2) if (sum(same_pollution) < 3) #TODO: 3 == magic number { @@ -79,53 +118,79 @@ Neighbors2Forecaster = R6::R6Class("Neighbors2Forecaster", } indices = indices[same_pollution] - # Now OK: indices same season, same pollution level - # ........... - + if (simtype != "exo") + { + h_endo = ifelse(simtype=="mix", h[1], h) - # ENDO:: Distances from last observed day to days in the past - serieToday = data$getSerie(today) - distances2 = sapply(indices, function(i) { - delta = serieToday - data$getSerie(i) - distances2[i] = mean(delta^2) - }) + # Distances from last observed day to days in the past + serieToday = data$getSerie(today) + distances2 = sapply(indices, function(i) { + delta = serieToday - data$getSerie(i) + mean(delta^2) + }) - sd_dist = sd(distances2) - if (sd_dist < .Machine$double.eps) - { + sd_dist = sd(distances2) + if (sd_dist < .Machine$double.eps) + { # warning("All computed distances are very close: stdev too small") - sd_dist = 1 #mostly for tests... FIXME: + sd_dist = 1 #mostly for tests... FIXME: + } + simils_endo = + if (kernel=="Gauss") + exp(-distances2/(sd_dist*h_endo^2)) + else + { + # Epanechnikov + u = 1 - distances2/(sd_dist*h_endo^2) + u[abs(u)>1] = 0. + u + } } - simils_endo = - if (kernel=="Gauss") - exp(-distances2/(sd_dist*h_endo^2)) - else + + if (simtype != "endo") + { + h_exo = ifelse(simtype=="mix", h[2], h) + + M = matrix( nrow=1+length(indices), ncol=1+length(data$getExo(today)) ) + M[1,] = c( data$getLevel(today), as.double(data$getExo(today)) ) + for (i in seq_along(indices)) + M[i+1,] = c( data$getLevel(indices[i]), as.double(data$getExo(indices[i])) ) + + sigma = cov(M) #NOTE: robust covariance is way too slow +# sigma_inv = solve(sigma) #TODO: use pseudo-inverse if needed? + sigma_inv = MASS::ginv(sigma) +#if (final_call) browser() + # Distances from last observed day to days in the past + distances2 = sapply(seq_along(indices), function(i) { + delta = M[1,] - M[i+1,] + delta %*% sigma_inv %*% delta + }) + + sd_dist = sd(distances2) + if (sd_dist < .25 * sqrt(.Machine$double.eps)) { - # Epanechnikov - u = 1 - distances2/(sd_dist*h_endo^2) - u[abs(u)>1] = 0. - u +# warning("All computed distances are very close: stdev too small") + sd_dist = 1 #mostly for tests... FIXME: } + simils_exo = + if (kernel=="Gauss") + exp(-distances2/(sd_dist*h_exo^2)) + else + { + # Epanechnikov + u = 1 - distances2/(sd_dist*h_exo^2) + u[abs(u)>1] = 0. + u + } + } -# # EXOGENS: distances computations are enough -# # TODO: search among similar concentrations....... at this stage ?! 
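# [Editor's aside, not part of the patch] A standalone sketch of the two kernel
# weightings used just above for both simils_endo and simils_exo. It assumes a
# vector of squared distances, their standard deviation and a bandwidth h are
# already available; the function name and defaults are illustrative only.
kernelWeights = function(distances2, sd_dist, h, kernel="Gauss")
{
	if (kernel == "Gauss")
		return (exp(-distances2 / (sd_dist * h^2)))
	# Epanechnikov-style weight, as written in the patch: truncated outside [-1,1]
	u = 1 - distances2 / (sd_dist * h^2)
	u[abs(u) > 1] = 0.
	u
}
# Example: kernelWeights(c(0.1, 0.5, 2.0), sd_dist=1, h=1.5, kernel="Epan")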
-# M = matrix( nrow=1+length(fdays), ncol=1+length(data$getExo(today)) ) -# M[1,] = c( data$getLevel(today), as.double(data$getExo(today)) ) -# for (i in seq_along(fdays)) -# M[i+1,] = c( data$getLevel(fdays[i]), as.double(data$getExo(fdays[i])) ) -# -# sigma = cov(M) #NOTE: robust covariance is way too slow -# sigma_inv = solve(sigma) #TODO: use pseudo-inverse if needed? -# -# # Distances from last observed day to days in the past -# distances2 = rep(NA, nrow(M)-1) -# for (i in 2:nrow(M)) -# { -# delta = M[1,] - M[i,] -# distances2[i-1] = delta %*% sigma_inv %*% delta -# } - - similarities = simils_endo + similarities = + if (simtype == "exo") + simils_exo + else if (simtype == "endo") + simils_endo + else #mix + simils_endo * simils_exo prediction = rep(0, horizon) for (i in seq_along(indices)) @@ -135,8 +200,14 @@ Neighbors2Forecaster = R6::R6Class("Neighbors2Forecaster", if (final_call) { private$.params$weights <- similarities - private$.params$indices <- indices - private$.params$window <- h + private$.params$indices <- fdays + private$.params$window <- + if (simtype=="endo") + h_endo + else if (simtype=="exo") + h_exo + else #mix + c(h_endo,h_exo) } return (prediction) diff --git a/pkg/R/computeForecast.R b/pkg/R/computeForecast.R index 8cf8861..3537e8a 100644 --- a/pkg/R/computeForecast.R +++ b/pkg/R/computeForecast.R @@ -55,13 +55,28 @@ computeForecast = function(data, indices, forecaster, pjump, forecaster_class_name = getFromNamespace(paste(forecaster,"Forecaster",sep=""), "talweg") forecaster = forecaster_class_name$new( #.pjump = getFromNamespace(paste("get",pjump,"JumpPredict",sep=""), "talweg")) - for (today in integer_indices) - { - pred$append( - new_serie = forecaster$predictSerie(data, today, memory, horizon, ...), - new_params = forecaster$getParameters(), - new_index_in_data = today - ) - } + +#oo = forecaster$predictSerie(data, integer_indices[1], memory, horizon, ...) +#browser() + + library(parallel) + ppp <- parallel::mclapply(seq_along(integer_indices), function(i) { + list( + "forecast" = forecaster$predictSerie(data, integer_indices[i], memory, horizon, ...), + "params"= forecaster$getParameters(), + "index" = integer_indices[i] ) + }, mc.cores=3) + +#browser() + +for (i in seq_along(integer_indices)) +{ + pred$append( + new_serie = ppp[[i]]$forecast, + new_params = ppp[[i]]$params, + new_index_in_data = ppp[[i]]$index + ) +} + pred } diff --git a/reports/report.gj b/reports/report.gj index 3932639..aee6ad4 100644 --- a/reports/report.gj +++ b/reports/report.gj @@ -2,9 +2,9 @@

Introduction

I ran a few tests in different configurations for the "Neighbors" method
-(the only one we have discussed so far).
It seems that the best choice is
+(the only one we have discussed so far) and its recent variant, currently called "Neighbors2".
- * simtype="exo" ou "mix" : similarités exogènes avec/sans endogènes (fenêtre optimisée par VC) + * simtype="exo", "endo" ou "mix" : type de similarités (fenêtre optimisée par VC) * same_season=FALSE : les indices pour la validation croisée ne tiennent pas compte des saisons * mix_strategy="mult" : on multiplie les poids (au lieu d'en éteindre) diff --git a/reports/report.ipynb b/reports/report.ipynb index 74d6880..899fbf6 100644 --- a/reports/report.ipynb +++ b/reports/report.ipynb @@ -2,7 +2,10 @@ "cells": [ { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "\n", "\n", @@ -29,24 +32,32 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "library(talweg)\n", "\n", "ts_data = read.csv(system.file(\"extdata\",\"pm10_mesures_H_loc_report.csv\",package=\"talweg\"))\n", "exo_data = read.csv(system.file(\"extdata\",\"meteo_extra_noNAs.csv\",package=\"talweg\"))\n", - "data = getData(ts_data, exo_data, input_tz = \"Europe/Paris\", working_tz=\"Europe/Paris\",\n", - "\tpredict_at=7) #predict from P+1 to P+H included\n", + "# Predict from P+1 to P+H included\n", + "H = 17\n", + "data = getData(ts_data, exo_data, input_tz = \"GMT\", working_tz=\"GMT\", predict_at=7)\n", "\n", "indices_ch = seq(as.Date(\"2015-01-18\"),as.Date(\"2015-01-24\"),\"days\")\n", "indices_ep = seq(as.Date(\"2015-03-15\"),as.Date(\"2015-03-21\"),\"days\")\n", - "indices_np = seq(as.Date(\"2015-04-26\"),as.Date(\"2015-05-02\"),\"days\")\n" + "indices_np = seq(as.Date(\"2015-04-26\"),as.Date(\"2015-05-02\"),\"days\")" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "\n", "\n", @@ -56,23 +67,47 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ - "p_nn_exo = computeForecast(data, indices_ch, \"Neighbors\", \"Neighbors\",\n", - "\thorizon=3, simtype=\"exo\")\n", - "p_nn_mix = computeForecast(data, indices_ch, \"Neighbors\", \"Neighbors\",\n", - "\thorizon=3, simtype=\"mix\")\n", - "p_az = computeForecast(data, indices_ch, \"Average\", \"Zero\",\n", - "\thorizon=3)\n", - "p_pz = computeForecast(data, indices_ch, \"Persistence\", \"Zero\",\n", - "\thorizon=3, same_day=TRUE)" + "reload(\"../pkg\")\n", + "p1 = computeForecast(data, indices_ch, \"Neighbors\", \"Zero\", horizon=H, simtype=\"exo\")\n", + "p2 = computeForecast(data, indices_ch, \"Neighbors\", \"Zero\", horizon=H, simtype=\"endo\")\n", + "p3 = computeForecast(data, indices_ch, \"Neighbors\", \"Zero\", horizon=H, simtype=\"mix\")\n", + "p4 = computeForecast(data, indices_ch, \"Neighbors2\", \"Zero\", horizon=H, simtype=\"exo\")\n", + "p5 = computeForecast(data, indices_ch, \"Neighbors2\", \"Zero\", horizon=H, simtype=\"endo\")\n", + "p6 = computeForecast(data, indices_ch, \"Neighbors2\", \"Zero\", horizon=H, simtype=\"mix\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "e1 = computeError(data, p1, H)\n", + "e2 = computeError(data, p2, H)\n", + "e3 = computeError(data, p3, H)\n", + "e4 = computeError(data, p4, H)\n", + "e5 = computeError(data, p5, H)\n", + "e6 = computeError(data, p6, H)\n", + "plotError(list(e1,e2,e3,e4,e5,e6), cols=c(1,2,colors()[258], 4,5,6))" ] }, { "cell_type": 
"code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "e_nn_exo = computeError(data, p_nn_exo, 3)\n", @@ -91,7 +126,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "options(repr.plot.width=9, repr.plot.height=4)\n", @@ -112,7 +151,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -126,7 +169,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -140,7 +187,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -156,7 +207,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -172,7 +227,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "# Fenêtres sélectionnées dans ]0,10] / endo à gauche, exo à droite\n", @@ -185,7 +244,10 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "\n", "\n", @@ -195,7 +257,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "p_nn_exo = computeForecast(data, indices_ep, \"Neighbors\", \"Neighbors\",\n", @@ -211,7 +277,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "e_nn_exo = computeError(data, p_nn_exo, 3)\n", @@ -230,7 +300,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "options(repr.plot.width=9, repr.plot.height=4)\n", @@ -251,7 +325,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -265,7 +343,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -279,7 +361,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -295,7 +381,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -311,7 +401,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": 
true, + "editable": true + }, "outputs": [], "source": [ "# Fenêtres sélectionnées dans ]0,10] / endo à gauche, exo à droite\n", @@ -324,7 +418,10 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "\n", "\n", @@ -334,7 +431,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "p_nn_exo = computeForecast(data, indices_np, \"Neighbors\", \"Neighbors\",\n", @@ -350,7 +451,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "e_nn_exo = computeError(data, p_nn_exo, 3)\n", @@ -369,7 +474,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "options(repr.plot.width=9, repr.plot.height=4)\n", @@ -390,7 +499,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -404,7 +517,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -418,7 +535,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -434,7 +555,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "par(mfrow=c(1,2))\n", @@ -450,7 +575,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "collapsed": false, + "deletable": true, + "editable": true + }, "outputs": [], "source": [ "# Fenêtres sélectionnées dans ]0,10] / endo à gauche, exo à droite\n", @@ -463,7 +592,10 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "deletable": true, + "editable": true + }, "source": [ "\n", "\n",
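
Editor's note on the parallel::mclapply loop introduced in computeForecast.R above: a minimal sketch of the same fork-based pattern, with the per-day work replaced by a hypothetical run_one() stand-in for forecaster$predictSerie()/getParameters(). Unlike the patch, it does not call library(parallel) inside a package function (the namespaced call suffices) and does not hard-code mc.cores=3. Note that mclapply relies on forking, so on Windows it can only run serially (mc.cores = 1).

integer_indices = 1:7                  # illustrative: one entry per day to forecast
run_one = function(i) {                # hypothetical stand-in for the real per-day work
	list(forecast = rnorm(17),         #   forecaster$predictSerie(data, integer_indices[i], ...)
	     params   = list(),            #   forecaster$getParameters()
	     index    = integer_indices[i])
}
ncores = getOption("mc.cores", 2L)     # let the caller choose the core count
ppp = parallel::mclapply(seq_along(integer_indices), run_one, mc.cores = ncores)
# The results are then appended sequentially to the Forecast object, as in the patch:
# for (i in seq_along(integer_indices)) pred$append(new_serie=ppp[[i]]$forecast, ...)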