merge with remote
[aggexp.git] / pkg / src / ml.predict_noNA.c
CommitLineData
a961f8a1
BA
#include <math.h>
#include <stdlib.h>

/* Online aggregation of K expert forecasts with per-expert adaptive
 * learning rates (the update eta_i = 1 / (1 + sum of squared excess
 * losses) matches the ML-Poly family of aggregation rules — NOTE(review):
 * confirm against the package's reference).
 *
 * R `.C` calling convention: every scalar arrives as a pointer.
 *   X       in  : n x K expert predictions, row-major (X[t*K + i] is
 *                 expert i's forecast at time t); assumed NA-free.
 *   Y       in  : n observed values.
 *   n_      in  : number of time steps.
 *   K_      in  : number of experts (>= 2 expected by the caller).
 *   alpha_  in  : mixing rate in [0,1]; each round, weights are blended
 *                 with the uniform distribution to keep all experts alive.
 *   grad_   in  : nonzero -> use the gradient of the squared loss as the
 *                 per-expert loss; zero -> use the squared loss itself.
 *   weight  out : final K weights (nonnegative, summing to 1).
 *
 * On allocation failure the function returns early with weight[] left at
 * the uniform distribution — a valid, if uninformed, answer. */
void ml_predict_noNA(double* X, double* Y, int* n_, int* K_, double* alpha_, int* grad_, double* weight)
{
	int K = *K_;
	int n = *n_;
	double alpha = *alpha_;
	int grad = *grad_;

	// Start from the uniform distribution over experts.
	double initWeight = 1. / K;
	for (int i=0; i<K; i++)
		weight[i] = initWeight;

	double* error = (double*)malloc(K*sizeof(double));
	double* cumDeltaError = (double*)calloc(K, sizeof(double));
	double* regret = (double*)calloc(K, sizeof(double));
	if (error == NULL || cumDeltaError == NULL || regret == NULL)
	{
		// OOM: bail out with the uniform weights already written above.
		free(error);
		free(cumDeltaError);
		free(regret);
		return;
	}

	// Main loop over time steps.
	// BUG FIX: the increment clause was "t++ < n" — the trailing
	// comparison was evaluated and discarded every iteration.
	for (int t=0; t<n; t++)
	{
		if (grad)
		{
			// Gradient trick: linearize the squared loss at the
			// current aggregated forecast hatY.
			double hatY = 0.;
			for (int i=0; i<K; i++)
				hatY += X[t*K+i] * weight[i];
			for (int i=0; i<K; i++)
				error[i] = 2. * (hatY - Y[t]) * X[t*K+i];
		}
		else
		{
			// Plain squared loss per expert.
			for (int i=0; i<K; i++)
			{
				double delta = X[t*K+i] - Y[t];
				error[i] = delta * delta;
			}
		}

		// Loss of the current weighted mixture.
		double hatError = 0.;
		for (int i=0; i<K; i++)
			hatError += error[i] * weight[i];

		// Per-expert regret update with an individual learning rate
		// that shrinks as the squared excess losses accumulate.
		for (int i=0; i<K; i++)
		{
			double deltaError = hatError - error[i];
			cumDeltaError[i] += deltaError * deltaError;
			regret[i] += deltaError;
			double eta = 1. / (1. + cumDeltaError[i]);
			// Only positive regret earns weight (polynomial potential).
			weight[i] = regret[i] > 0. ? eta * regret[i] : 0.;
		}

		// Normalize to a probability vector.
		double sumWeight = 0.0;
		for (int i=0; i<K; i++)
			sumWeight += weight[i];
		for (int i=0; i<K; i++)
			weight[i] /= sumWeight;

		// Redistribute weights if alpha > 0 (all weights are 0 or more, sum > 0):
		// mixing with uniform prevents any expert's weight from hitting 0 forever.
		for (int i=0; i<K; i++)
			weight[i] = (1. - alpha) * weight[i] + alpha/K;
	}

	free(error);
	free(cumDeltaError);
	free(regret);
}