import numpy as np
### Input sample sizes and observed x (8 baskets)
x = np.array([2.0, 0.0, 1.0, 2.0, 2.0, 3.0, 0.0, 1.0])
n = np.array([2, 4, 20, 8, 15, 13, 10, 13])
### PyMC 2 implementation
from pymc import *
def basket_model():
    """Build the PyMC2 hierarchical (basket) model for the 8 observed proportions.

    Returns the local namespace as a dict so ``MCMC(basket_model())`` can
    collect every stochastic/deterministic node defined here.  Reads the
    module-level data arrays ``x`` and ``n``.
    """
    # Hyperprior on the logit-scale mean; PyMC2's Normal takes (mu, tau),
    # so tau=0.0001 is a vague prior (sd = 100).
    theta = Normal("theta", 0, 0.0001, value=0)
    # Vague prior for the between-basket standard deviation
    sigma = Uniform("sigma", 0, 1000, value=1)
    # Convert sd to precision for the group-level Normal
    tau = sigma**-2
    # One partially-pooled logit-scale effect per basket (8 groups)
    thetas = Normal("thetas", theta, tau, value=[0]*8)
    # Deterministic transform: success probability on the unit interval
    p = Lambda("p", lambda t=thetas: invlogit(t))
    # Likelihood: observed successes x out of n trials per basket
    like = Binomial("like", n=n, p=p, value=x, observed=True)
    return(locals())
%%timeit
# Benchmark the PyMC2 fit: 5000 iterations, first 1000 discarded as burn-in
M = MCMC(basket_model())
M.sample(iter=5000, burn=1000, progress_bar=False)
### PyMC 3 implementation
###################### Model
from pymc import *
import theano.tensor as t
def tinvlogit(x):
    """Inverse-logit (sigmoid) of *x* as a Theano expression.

    Returns exp(x) / (1 + exp(x)).  The exponential is evaluated once and
    reused, instead of being recomputed in numerator and denominator as the
    original did.
    """
    # NOTE(review): exp(x) overflows for large positive x; Theano's built-in
    # sigmoid is the numerically stable alternative -- confirm before relying
    # on extreme logit values.
    e = t.exp(x)
    return e / (1 + e)
# p_true
# theta_true = -2
# sigma_true = 2
# thetas_true = np.random.normal(theta_true, sigma_true, size=8)
# p_true = invlogit(thetas_true)
# print('p_true', p_true)
#
# x = rbinomial(n, p_true)
with Model() as model:
    # Hyperprior on the logit-scale mean.
    # NOTE(review): early PyMC3 releases changed the third positional
    # argument of Normal (tau vs sd); 0.0001 is intended as a precision
    # (sd = 100) to match the PyMC2 model -- confirm against the installed
    # PyMC3 version.
    theta = Normal("theta", 0, 0.0001)
    # Vague prior for the between-basket sd.
    # NOTE(review): named "s" here but "sigma" in the PyMC2/JAGS versions;
    # trace summaries will report it under the string name "s".
    sigma = Uniform("s", 0, 1000)
    # sd -> precision for the group-level Normal
    tau = sigma**-2
    # One partially-pooled logit-scale effect per basket (8 groups)
    thetas = Normal("thetas", theta, tau, shape=8)
    # Success probabilities on the unit interval
    p = Deterministic('p', tinvlogit(thetas))
    # Likelihood: observed successes x out of n trials per basket
    like = Binomial("like", n=n, p=p, observed=x)
    # Run the model: Slice for the hyperparameters, NUTS for the thetas
    step1 = Slice(vars=[theta, sigma])
    step2 = NUTS(vars=[thetas])
    trace = sample(50000, (step1, step2))
# Posterior summary of the per-basket probabilities
summary(trace, vars=[p])
### R implementation
%load_ext rpy2.ipython
%%R
###############################################################################
## Simple Bayesian Hierarchical model for proportions using JAGS
## Author: Ben Saville
## Date: July 2014
###############################################################################
library(rjags)
library(rbenchmark)
#library(rmeta)
#library(graphics)
### Input sample sizes and observed x (same data as the Python models above)
x = c(2,0,1,2,2,3,0,1)
n = c(2,4,20,8,15,13,10,13)
### Binary covariate per basket, used by the JAGS model's beta*z[i] term
z = c(1,1,1,1,0,0,0,0)
### Observed proportions, printed for reference
p.obs <- x/n
p.obs
################# Data list passed to jags.model()
## Fix: the JAGS model references z[i] (the beta*z covariate), so z must be
## supplied as data; without it JAGS cannot resolve node z[1] and the model
## fails to compile.
df = list( 'x' = x,
           'n' = n,
           'z' = z)
############################# Fitting model with JAGS ############################################
################# Initial values (one list per chain)
inits1 = list( sigma = 1, theta=0, thetas=rep(0,8)) # No seed
#inits1 = list( beta0 = 0, beta1 = 0, tau = 1, .RNG.seed=1, .RNG.name='base::Wichmann-Hill') # Sets seed
################# Conjugate model ############################
## Create a txt file for use by JAGS
## NOTE(review): tau <- 1/sigma below treats sigma as a variance, while the
## Python models use tau = sigma^-2 (sigma as an sd) -- confirm which is
## intended; the three implementations currently disagree.
## NOTE(review): the likelihood adds a beta*z[i] covariate term absent from
## the Python models, and p.all = invlogit(theta) ignores beta.
fileForJAGS <- "BasketModel.txt"
cat("
# This file is created dynamically by R file for this project.
model {
for(i in 1:8) {
x[i] ~ dbin(p[i], n[i])
logit(p[i]) <- thetas[i] + beta*z[i]
thetas[i] ~ dnorm(theta,tau)
}
theta ~ dnorm(0,0.0001)
beta ~ dnorm(0,0.0001)
tau <- 1/sigma
sigma ~ dunif(0,1000)
p.all <- exp(theta)/(1+exp(theta))
}
", file= fileForJAGS)
# n.chains = number of chains to monitor (with different starting values)
# n.adapt = initial sample phase to optimize the sampling mechanism (e.g. Metropolis Hastings step size)
# inits = initial values from above
jags.Conj <- jags.model("BasketModel.txt",data=df,n.chains=1, n.adapt=10000, inits=inits1)
## burn-in
update( jags.Conj , n.iter=10000 )
## Store MCMC samples after burn-in
## Arguments are the parameters to be monitored and the number of samples to take
#res <- benchmark(coda.samples(jags.Conj,c('p','p.all','theta'),40000))
#print(res)
mcmc.samples.Conj <- coda.samples(jags.Conj,c('p','p.all','theta'),40000)
post = summary(mcmc.samples.Conj,quantiles=c(0.05,0.95))
post