The original BUGS model, from Link and Barker (2009), for the two-observer closed-population model:
# Data
# T = number of observers; Cells = number of capture-history cells.
# f = cell counts: f[1] (detected by neither observer) is unobservable, hence NA.
#     The remaining entries follow the rows of omegas below:
#     22 seen by the first observer only, 363 by the second only, 174 by both.
list(T=2, Cells=4, f=c(NA,22,363,174),
# omegas[j,t] = 1 if observer t detects an animal with capture history j
omegas=structure(.Data=c(0,0,
1,0,
0,1,
1,1),.Dim=c(4,2)))
model
{
# Detection probability for each observer
for (t in 1:T)
{
p[t] ~ dunif(0,1)
}
# Probability of each capture-history cell: product over observers of
# p[t] when detected (omegas=1) and 1-p[t] when not (omegas=0)
for (j in 1:Cells)
{
for (t in 1:T)
{
c[j,t] <- pow(p[t],omegas[j,t]) * pow(1-p[t],1-omegas[j,t])
}
pi[j] <- prod(c[j,1:T])
}
# Cell probabilities conditional on being detected at least once
# (renormalized by excluding the "seen by neither" cell, pi[1])
for (j in 2:Cells)
{
pi.obs[j] <- pi[j]/(1-pi[1])
}
# Observed cell counts are multinomial given the total number observed, n
n <- sum(f[2:Cells])
f[2:Cells] ~ dmulti(pi.obs[2:Cells],n)
# Number observed is binomial in the latent population size N,
# with detection probability 1 - pi[1]
pn <- 1-pi[1]
n ~ dbin(pn,N)
# Continuous uniform prior on N, bounded below by the number observed
cN ~ dunif(n,10000)
N <- round(cN)
}
%matplotlib inline
import numpy as np
import pymc3 as pm
import theano.tensor as tt
# Number of observers
T = 2
# Cell counts, ordered to match the rows of omega below: detected by
# neither observer (unobservable, hence NaN), by the first observer only,
# by the second observer only, and by both observers.
f = np.array([np.nan, 22, 363, 174])
# Total number of distinct animals observed (NaN-aware sum)
n = np.nansum(f)
# omega[j, t] = 1 if observer t detects an animal with capture history j
omega = np.array([[0, 0],
                  [1, 0],
                  [0, 1],
                  [1, 1]])
with pm.Model() as Mt2:
    # Individual detection probabilities, one per observer
    # (BUGS: p[t] ~ dunif(0,1))
    p = pm.Uniform('p', 0, 1, shape=T)
    # Joint capture-history probabilities: each row of omega selects p or
    # (1-p) per observer; the product over observers (axis 1) gives the
    # probability of that capture history
    c = p**omega * (1 - p)**(1 - omega)
    π = pm.Deterministic('π', tt.prod(c, 1))
    # Probabilities conditional on being detected at least once
    # (renormalize by excluding the unobservable "seen by neither" cell)
    π_obs = π[1:] / (1 - π[0])
    f_obs = pm.Potential('f_obs', pm.Multinomial.dist(n=n, p=π_obs).logp(f[1:]))
    # Latent population size. The lower bound is n, as in the BUGS model
    # (cN ~ dunif(n, 10000)): values N < n have zero probability under the
    # binomial below, and a Uniform(0, 10000) prior lets the sampler
    # propose them, yielding -inf log-probabilities (the divergences and
    # max-treedepth warnings seen when sampling this model).
    N = pm.Uniform('N', n, 10000)
    # The number observed is binomial in N with detection probability
    # 1 - π[0] (BUGS: n ~ dbin(pn, N))
    n_obs = pm.Potential('n_obs', pm.Binomial.dist(n=tt.floor(N), p=1-π[0]).logp(n))
with Mt2:
    # Draw 1000 posterior samples after 1000 tuning iterations
    trace = pm.sample(draws=1000, tune=1000)
Auto-assigning NUTS sampler... Initializing NUTS using jitter+adapt_diag... 100%|██████████| 2000/2000 [04:26<00:00, 7.56it/s]/Users/fonnescj/Repos/pymc3/pymc3/step_methods/hmc/nuts.py:459: UserWarning: Chain 0 reached the maximum tree depth. Increase max_treedepth, increase target_accept or reparameterize. 'reparameterize.' % self._chain_id) /Users/fonnescj/Repos/pymc3/pymc3/step_methods/hmc/nuts.py:451: UserWarning: The acceptance probability in chain 0 does not match the target. It is 0.715818227235, but should be close to 0.8. Try to increase the number of tuning steps. % (self._chain_id, mean_accept, target_accept)) /Users/fonnescj/Repos/pymc3/pymc3/step_methods/hmc/nuts.py:467: UserWarning: Chain 0 contains 16 diverging samples after tuning. If increasing `target_accept` does not help try to reparameterize. % (self._chain_id, n_diverging))
# Posterior distribution of the latent population size N
pm.plot_posterior(trace, varnames=['N'])
<matplotlib.axes._subplots.AxesSubplot at 0x1295292b0>
# Posterior summary of the per-observer detection probabilities
pm.summary(trace, varnames=['p'])
p: Mean SD MC Error 95% HPD interval ------------------------------------------------------------------- 0.325 0.020 0.002 [0.290, 0.367] 0.894 0.022 0.002 [0.853, 0.936] Posterior quantiles: 2.5 25 50 75 97.5 |--------------|==============|==============|--------------| 0.287 0.311 0.323 0.337 0.365 0.853 0.879 0.893 0.910 0.937
# Forest plot of the capture-history cell probabilities π
pm.forestplot(trace, varnames=['π'])
<matplotlib.gridspec.GridSpec at 0x127ae55f8>