# Quiz Week 4 - Lesson 11B.R
# Q3
library("MASS")
data("OME")
dat = subset(OME, OME != "N/A")
dat$OME = factor(dat$OME) # relabel OME
dat$ID = as.numeric(factor(dat$ID)) # relabel ID so there are no gaps in numbers (they now go from 1 to 63)
## Original reference model and covariate matrix
mod_glm = glm(Correct/Trials ~ Age + OME + Loud + Noise, data=dat, weights=Trials, family="binomial")
X = model.matrix(mod_glm)[,-1]
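## Columns of X (Age, OMElow, Loud, Noiseincoherent) correspond to b[1]..b[4] in the JAGS model below;
## the intercept column is dropped because the model supplies its own intercepts.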
## Hierarchical extension of the original model: a separate intercept a[ID] for each child
mod_string = " model {
for (i in 1:length(y)) {
y[i] ~ dbin(phi[i], n[i])
logit(phi[i]) = a[ID[i]] + b[1]*Age[i] + b[2]*OMElow[i] + b[3]*Loud[i] + b[4]*Noiseincoherent[i]
}
for (k in 1:max(ID)) {
a[k] ~ dnorm(mu, prec_a)
}
mu ~ dnorm(0.0, 1.0/10.0^2)
prec_a ~ dgamma(1/2.0, 1*1.0/2.0)
tau = sqrt( 1.0 / prec_a )
for (j in 1:4) {
b[j] ~ dnorm(0.0, 1.0/4.0^2)
}
} "
data_jags = as.list(as.data.frame(X))
data_jags$y = dat$Correct
data_jags$n = dat$Trials
data_jags$ID = dat$ID
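## data_jags now holds the four covariate columns plus y (correct responses), n (trials), and ID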
set.seed(116)
params = c("mu", "a", "b", "tau")
mod = jags.model(textConnection(mod_string), data=data_jags, n.chains=3)
update(mod, 1e3) # burn-in
mod_sim = coda.samples(model=mod,
                       variable.names=params,
                       n.iter=5e3)
mod_csim = as.mcmc(do.call(rbind, mod_sim)) # combine multiple chains
## convergence diagnostics
plot(mod_sim)
gelman.diag(mod_sim)
autocorr.diag(mod_sim)
autocorr.plot(mod_sim)
effectiveSize(mod_sim)
dic.samples(mod, n.iter=1e3)
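
## Posterior summaries -- a minimal follow-up sketch, not part of the original quiz script.
## The variable name pm_params is ours; it holds the posterior means from the combined chains.
pm_params = colMeans(mod_csim)
pm_params
summary(mod_sim)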