## Loading required package: rjags
## Warning: package 'rjags' was built under R version 3.4.4
## Loading required package: coda
## Linked to JAGS 4.3.0
## Loaded modules: basemod,bugs
##
## Attaching package: 'R2jags'
## The following object is masked from 'package:coda':
##
## traceplot
Example of Normal Data
Define the model for JAGS
# JAGS model for i.i.d. normal data. Note JAGS parameterizes dnorm by
# (mean, precision), not (mean, sd): mu gets a vague Normal(0, prec = 1e-6)
# prior (sd = 1000) and prec a vague Gamma(1, 1e-6) prior. sigma is a
# derived node, the standard deviation implied by prec.
normmodel <- '
model{
mu ~ dnorm (0, 0.000001)
prec ~ dgamma (1, 0.000001)
for (i in 1:n)
{
x[i] ~ dnorm(mu,prec)
}
sigma <- 1/sqrt(prec)
}
'
# Write the model text to disk so jags() can read it via model.file.
write (normmodel, file = "normmodel.bug")
Use JAGS to simulate MCMC
# Data passed to JAGS: n = 100 draws from Normal(mean = 100, sd = 5).
# These objects are held fixed during MCMC sampling.
data <- list (x = rnorm (100, 100, 5), n = 100)
# Generate one set of over-dispersed starting values per chain.
# The original used rnorm(1, 10, 0) (sd = 0) and a constant prec, so
# all 4 chains started at exactly the same point -- identical inits
# undermine the Rhat convergence diagnostic, which relies on chains
# starting from dispersed locations.
inits <- function ()
{
  list (mu = rnorm (1, 10, 10), prec = runif (1, 0.01, 1))
}
# Run the MCMC with R2jags: 4 chains of 5000 iterations, first 1000
# discarded as burn-in, no thinning; monitor mu and sigma.
# The argument is spelled out as parameters.to.save -- the original
# `parameters =` only worked via partial argument matching, which is
# fragile (breaks if R2jags adds another argument with that prefix)
# and is flagged by lintr as an anti-pattern.
fitj <- jags (model.file = "normmodel.bug",
              data = data, inits = inits,
              parameters.to.save = c("mu", "sigma"),
              n.chains = 4, n.thin = 1, n.burnin = 1000, n.iter = 5000)
## module glm loaded
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 100
## Unobserved stochastic nodes: 2
## Total graph size: 108
##
## Initializing model
# Trace plot of the mu chains from the fitted model.
# NOTE(review): xlim = c(1, 10) restricts the x-axis to the first 10
# saved iterations, even though 4000 post-burn-in draws per chain are
# available -- confirm this narrow window is intentional.
traceplot (fitj, varname = "mu", xlim = c(1, 10))
## Inference for Bugs model at "normmodel.bug", fit using jags,
## 4 chains, each with 5000 iterations (first 1000 discarded)
## n.sims = 16000 iterations saved
## mu.vect sd.vect 2.5% 25% 50% 75% 97.5% Rhat
## mu 99.959 0.547 98.874 99.591 99.956 100.327 101.017 1.001
## sigma 5.421 0.385 4.728 5.150 5.401 5.671 6.236 1.001
## deviance 623.202 2.006 621.250 621.783 622.587 623.954 628.637 1.001
## n.eff
## mu 11000
## sigma 16000
## deviance 16000
##
## For each parameter, n.eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor (at convergence, Rhat=1).
##
## DIC info (using the rule, pD = var(deviance)/2)
## pD = 2.0 and DIC = 625.2
## DIC is an estimate of expected predictive error (lower deviance is better).