# Naive (non-vectorized) reference implementation of the objective function:
# squared distance between the empirical moments M1, M2, M3 and the moments
# induced by the mixture parameters (p, β, b) under the given link function.
naive_f <- function(link, M1, M2, M3, p, β, b)
{
  d <- length(M1)
  K <- length(p)
  λ <- sqrt(colSums(β^2))
  # Compute the order-2 and order-3 (self) tensorial products of the columns of β
  β2 <- array(0, dim=c(d,d,K))
  β3 <- array(0, dim=c(d,d,d,K))
  for (i in 1:d) {
    for (j in 1:d) {
      for (k in 1:K) {
        β2[i,j,k] <- β[i,k] * β[j,k]
        for (l in 1:d)
          β3[i,j,l,k] <- β[i,k] * β[j,k] * β[l,k]
      }
    }
  }
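  # Illustrative sketch only (not part of the original test): the same tensors
  # could be built without nested loops, e.g. with outer(); kept commented out
  # so the naive reference implementation above stays untouched.
  #   for (k in 1:K) {
  #     β2[,,k]  <- outer(β[,k], β[,k])
  #     β3[,,,k] <- outer(outer(β[,k], β[,k]), β[,k])
  #   }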
  # Squared errors on the order-1 moments
  res <- 0
  for (i in 1:d) {
    term <- 0
    for (k in 1:K)
      term <- term + p[k] * .G(link,1,λ[k],b[k]) * β[i,k]
    res <- res + (term - M1[i])^2
  }
  # Squared errors on the order-2 moments
  for (i in 1:d) {
    for (j in 1:d) {
      term <- 0
      for (k in 1:K)
        term <- term + p[k] * .G(link,2,λ[k],b[k]) * β2[i,j,k]
      res <- res + (term - M2[i,j])^2
    }
  }
  # Squared errors on the order-3 moments
  for (i in 1:d) {
    for (j in 1:d) {
      for (l in 1:d) {
        term <- 0
        for (k in 1:K)
          term <- term + p[k] * .G(link,3,λ[k],b[k]) * β3[i,j,l,k]
        res <- res + (term - M3[i,j,l])^2
      }
    }
  }
  res
}
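# For reference (restating the code above, with λ_k = sqrt(colSums(β^2))[k]
# and Gm(λ,b) short for .G(link, m, λ, b)):
#   naive_f = Σ_i     ( Σ_k p_k G1(λ_k,b_k) β_ik             - M1_i   )^2
#           + Σ_{i,j}   ( Σ_k p_k G2(λ_k,b_k) β_ik β_jk      - M2_ij  )^2
#           + Σ_{i,j,l} ( Σ_k p_k G3(λ_k,b_k) β_ik β_jk β_lk - M3_ijl )^2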
test_that("naive computation provides the same result as vectorized computations",
{
  h <- 1e-7 # step for the finite-difference tests
  tol <- 5e-4 # tolerance for gradient vs. finite differences (.25*sqrt(h) ~ 7.9e-5 would be tighter)
  for (dK in list(c(2,2), c(5,3)))
  {
    d <- dK[1]
    K <- dK[2]

    # Random "empirical" moments of orders 1, 2 and 3
    M1 <- runif(d, -1, 1)
    M2 <- matrix(runif(d*d, -1, 1), ncol=d)
    M3 <- array(runif(d*d*d, -1, 1), dim=c(d,d,d))
    for (link in c("logit","probit"))
    {
      op <- new("OptimParams", "li"=link, "M1"=as.double(M1),
        "M2"=as.double(M2), "M3"=as.double(M3), "K"=as.integer(K))
      for (var in seq_len((2+d)*K-1))
      {
        # Random admissible parameters: weights p (normalized), directions β, biases b
        p <- runif(K, 0, 1)
        p <- p / sum(p)
        β <- matrix(runif(d*K, -5, 5), ncol=K)
        b <- runif(K, -5, 5)
        # Parameter vector as expected by OptimParams: the last component of p
        # is dropped since the weights sum to 1
        x <- c(p[1:(K-1)], as.double(β), b)
        # Test that the function values agree
        expect_equal( op$f(x), naive_f(link,M1,M2,M3, p,β,b) )
        # Test finite differences ~= gradient values
        dir_h <- rep(0, (2+d)*K-1)
        dir_h[var] <- h
        expect_equal( op$grad_f(x)[var], ( op$f(x+dir_h) - op$f(x) ) / h,
          tolerance=tol )
      }
    }
  }
})
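# Illustrative sketch (not part of the original tests): a central difference
# (f(x+h*e) - f(x-h*e)) / (2h) has O(h^2) truncation error versus O(h) for the
# forward difference used above, so it would tolerate a tighter `tol`.
# The helper name `central_diff` is hypothetical.
central_diff <- function(f, x, var, h=1e-6)
{
  e <- rep(0, length(x))
  e[var] <- h
  (f(x + e) - f(x - e)) / (2*h)
}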