# packages
library(INLA)
#> Loading required package: Matrix
#> Loading required package: sp
#> Loading required package: parallel
#> Loading required package: foreach
#> This is INLA_20.03.09 built 2020-03-09 09:12:35 UTC.
#> See www.r-inla.org/contact-us for how to get help.
inla.version()
#>
#>
#> INLA version ............: 20.03.09
#> INLA date ...............: Mon 09 Mar 2020 12:03:01 PM +03
#> INLA hgid ...............: Version_20.03.09
#> INLA-program hgid .......: Version_20.03.09
#> Maintainers .............: Havard Rue <[email protected]>
#> : Finn Lindgren <[email protected]>
#> : Daniel Simpson <[email protected]>
#> : Elias Teixeira Krainski <[email protected]>
#> : Haakon Bakka <[email protected]>
#> : Andrea Riebler <[email protected]>
#> : Geir-Arne Fuglstad <[email protected]>
#> Main web-page ...........: www.r-inla.org
#> Download-page ...........: inla.r-inla-download.org
#> Email support ...........: [email protected]
#> : [email protected]
#> Source-code .............: bitbucket.org/hrue/r-inla
# Simulate Poisson counts from the model
#   y_i ~ Poisson(E_i * lambda_i),  i = 1, ..., n
#   log(lambda_i) = eta_i = 1 + x_i
# where E_i is a known exposure term drawn from Uniform(0, 1).
set.seed(1)
n <- 200
x <- rnorm(n)
eta <- 1 + x
lambda <- exp(eta)
E <- runif(n)
y <- rpois(n, lambda = lambda * E)
# Linear combination of the first three entries of the linear predictor,
# i.e. eta_1 + eta_2 + eta_3, estimated alongside the model fit.
lc1 <- inla.make.lincomb(Predictor = c(1, 1, 1))
# Fit the Poisson GLM, requesting:
#  - the linear combination lc1,
#  - posterior marginals of the linear predictor (compute = TRUE),
#  - the internal GMRF configuration (config = TRUE), which is required by
#    inla.posterior.sample() and makes *joint* posterior draws of
#    (eta_1, ..., eta_n) available — the correct way to propagate posterior
#    correlation into nonlinear functionals such as sum_i E_i * exp(eta_i).
mod <- inla(
  formula = y ~ x,
  family = "poisson",
  data = data.frame(y, x, E),
  E = E,
  lincomb = lc1,
  control.predictor = list(compute = TRUE),
  control.compute = list(config = TRUE)
)
# Print the fit summary: fixed-effect posteriors, the derived linear
# combination (lc = eta_1 + eta_2 + eta_3), and timing information.
summary(mod)
#>
#> Call:
#> c("inla(formula = y ~ x, family = \"poisson\", data = data.frame(y, ",
#> " x, E), E = E, lincomb = lc1, control.predictor = list(compute =
#> TRUE))" )
#> Time used:
#> Pre = 1.44, Running = 0.273, Post = 0.24, Total = 1.95
#> Fixed effects:
#> mean sd 0.025quant 0.5quant 0.975quant mode kld
#> (Intercept) 0.985 0.068 0.849 0.986 1.115 0.988 0
#> x 1.080 0.050 0.982 1.080 1.180 1.080 0
#>
#> Linear combinations (derived):
#> ID mean sd 0.025quant 0.5quant 0.975quant mode kld
#> lc 1 1.574 0.256 1.072 1.574 2.076 1.574 0
#>
#> Expected number of effective parameters(stdev): 2.00(0.00)
#> Number of equivalent replicates : 99.85
#>
#> Marginal log-Likelihood: -284.42
#> Posterior marginals for the linear predictor and
#> the fitted values are computed
# Posterior summary of the derived linear combination eta_1 + eta_2 + eta_3.
mod$summary.lincomb.derived
#> ID mean sd 0.025quant 0.5quant 0.975quant mode kld
#> lc 1 1.574228 0.2556966 1.072209 1.574221 2.075827 1.574228 0
# Sanity check: by linearity of expectation, its posterior mean equals the
# sum of the posterior means of the first three linear-predictor entries.
sum(head(mod$summary.linear.predictor$mean, 3))
#> [1] 1.574227
# Now I would like to estimate E_1 * exp(eta_1) + E_2 * exp(eta_2) + E_3 *
# exp(eta_3) as a proxy for Y1 + Y2 + Y3. Is that possible? The problem is that
# if I take the exp of the lincomb I obtain exp(eta_1 + eta_2 + eta_3) but I
# would need exp(eta_1) + exp(eta_2) + exp(eta_3).
# At the moment the approach that I'm using is sampling N values from each
# posterior distribution and sum all of them. Is that reasonable?
# NOTE(review): the three inla.rmarginal() calls below draw from each marginal
# independently, so this sum ignores any posterior correlation among
# eta_1, eta_2, eta_3. For joint draws use inla.posterior.sample(), which
# requires fitting with control.compute = list(config = TRUE).
# NOTE(review): inside each anonymous function the argument `x` shadows the
# data vector `x` defined above — harmless here, but easy to misread.
# Transform each linear-predictor marginal to the scale E_i * exp(eta_i).
E1lambda1 <- inla.tmarginal(function(x) E[1] * exp(x), mod$marginals.linear.predictor[[1]])
E2lambda2 <- inla.tmarginal(function(x) E[2] * exp(x), mod$marginals.linear.predictor[[2]])
E3lambda3 <- inla.tmarginal(function(x) E[3] * exp(x), mod$marginals.linear.predictor[[3]])
# Monte Carlo: N independent draws from each transformed marginal, summed
# elementwise, then visualized as a density estimate of the total.
N <- 5000
res <- inla.rmarginal(N, E1lambda1) + inla.rmarginal(N, E2lambda2) + inla.rmarginal(N, E3lambda3)
plot(density(res))
# Created on 2020-06-15 by the reprex package (v0.3.0)