In [1]:
%matplotlib inline
import pymc3 as pm
import numpy as np
import pandas as pd
import theano.tensor as tt
from theano import shared, config

from theano.tensor.shared_randomstreams import RandomStreams
# Fixed-seed Theano random stream. NOTE(review): draws from this stream are
# *outside* the PyMC3 model graph (see mortality_model below).
srng = RandomStreams(seed=20090425)
In [23]:
config.exception_verbosity='high'
In [3]:
# Manatee mortality counts, 1996-2013, at two spatial aggregations
# (15 km and 30 km). Only data30 is used in the cells shown here.
# NOTE(review): meaning of 'dword' in the filenames not documented — confirm.
data15 = pd.read_csv('mortalities.15km.dword.1996.2013.csv')
data30 = pd.read_csv('mortalities.30km.dword.1996.2013.csv')
In [4]:
# FL regions
regions = ['ATL', 'USJ', 'NW', 'SW']
ATL, USJ, NW, SW = range(4)

# Winter habitat quality
habitats = ['Low', 'Medium', 'High']
LOW, MED, HIGH = range(3)

# Causes of death. The model below indexes positions directly:
# index 3 = cold, index 4 = red tide.
causes = ['watercraft', 'wcs', 'debris', 'cold', 'redtide', 'other']
long_causes = causes + ['undetermined']
WATERCRAFT, WCS, DEBRIS, COLD, REDTIDE, OTHER, UNDETERMINED = range(7)
# Backward-compatible alias for the original (misspelled) constant name.
REDTIED = REDTIDE

# Age classes
classes = ['Calves', 'Subadults', 'Adults']

# Analysis window (inclusive)
STARTYEAR, ENDYEAR = 1996, 2013

# Unusual mortality event red tide years
tide_years = (1996, 2002, 2003, 2005, 2006, 2012, 2013)
# Moderately unusual red tide years
tide_years0 = (2002, 2003, 2005, 2006, 2012)
# Intense red tide years
tide_years_intense = (1996, 2013)
# Atlantic unusual mortality event red tide years
atl_tide_years = (2007, 2008)

# Severe cold years (range(2010, 2011) is just the single year 2010)
severe_years = {ATL: range(2010, 2011), USJ: range(2010, 2011), NW: (2003, 2010), SW: range(2010, 2011)} 

# Cold years
cold_years = {ATL: (1996, 2001, 2003), USJ: (1996, 2001, 2009, 2011), 
              NW: (1996, 1998, 2001, 2011), SW: (1996, 2001)}

Uninformative (flat Dirichlet) priors for the unknown cause-of-death proportions

In [5]:
# Number of cause categories per region, in (ATL, USJ, NW, SW) order.
# USJ has one fewer category because red tide is dropped there
# (see the USJ special case in mortality_model).
ncauses = tuple(len(causes) - 1 if r == 'USJ' else len(causes) for r in regions)
# Flat (all-ones) Dirichlet concentration vectors, one per region.
prior0 = list(map(np.ones, ncauses))

From Langtimm et al. 2004 (mortality ratios from USJ)

In [6]:
# Calf-to-adult mortality ratios from Langtimm et al. 2004 (USJ estimates):
# first-year (0.190) and second-year (0.085) calf mortality relative to
# the adult rate (0.031).
calf1_mort_ratio = 0.190/0.031
calf2_mort_ratio = 0.085/0.031

Adult baseline mortalities and standard errors (se) from Langtimm et al., unpublished (2015).

Changed this to a tuple, so that you can just index with the appropriate constant

In [7]:
# Adult baseline annual mortality by region, in (ATL, USJ, NW, SW) order
# so it can be indexed with the region constants defined above.
adult_mort_base = 0.027417, 0.020739, 0.022355, 0.023073

# Spot check: SW baseline
adult_mort_base[SW]
Out[7]:
0.023073

Generate baseline calf mortality rates by region (note: the cell below computes mortality rates only; no logit-scale survival rates or variances are produced here)

In [8]:
# Regional first- and second-year calf mortality baselines, scaled from the
# adult baselines by the Langtimm et al. (2004) calf/adult ratios.
calf1_mort_base = calf1_mort_ratio * np.array(adult_mort_base)
calf2_mort_base = calf2_mort_ratio * np.array(adult_mort_base)
# Collapse the two yearly rates into a single per-year calf mortality:
# one minus the geometric mean of the two yearly survival probabilities.
calf_mort_base = 1 - np.sqrt((1 - calf1_mort_base) * (1 - calf2_mort_base))

# First year of WCS-related protections (not referenced in the cells shown
# here — presumably used elsewhere in the analysis; confirm).
wcs_protect_year = 2001
In [9]:
calf_mort_base
Out[9]:
array([ 0.12283572,  0.09266703,  0.09995114,  0.1031908 ])
In [10]:
# Restrict to calf records inside the analysis window, then recode the
# region and habitat labels to the integer constants defined above.
analysis_subset = (data30[data30.year.between(STARTYEAR, ENDYEAR)
                          & (data30.age == 'Calves')]
                   .replace({'area': {r: i for i, r in enumerate(regions)},
                             'habitat': {h: i for i, h in enumerate(habitats)}}))
In [11]:
analysis_subset.head()
Out[11]:
year age area habitat watercraft wcs debris cold redtide other undetermined total
0 1996 Calves 0 0 4 0 0 3 0 5 3 15
1 1997 Calves 0 0 6 0 0 0 0 3 7 16
2 1998 Calves 0 0 3 0 0 6 0 5 14 28
3 1999 Calves 0 0 4 1 0 2 0 2 11 20
4 2000 Calves 0 0 4 0 0 5 0 7 9 25
In [24]:
def mortality_model(data, prior=prior0, verbose=0):
    """Build a PyMC3 model of calf cause-of-death proportions.

    For each (area, year, habitat) group of carcass counts, a baseline
    cause-proportion vector π[area] is perturbed by red-tide (a, b) and
    winter-severity (sev/cold/norm) effect factors to give θ, the
    multinomial proportions of the total deaths.  Undetermined carcasses
    are reallocated across causes with a second proportion vector η,
    derived the same way from p[area].

    data    : DataFrame with 'area', 'year', 'habitat', the cause-count
              columns listed in `causes`, and 'undetermined'.  The code
              below appears to assume one row per group — TODO confirm.
    prior   : per-region Dirichlet concentration vectors (USJ's is one
              entry shorter because red tide is dropped there).
    verbose : forwarded to pm.Model.

    Returns the (unsampled) pm.Model instance.
    """
    # Jeff: simplifying by getting rid of ATL red tide, I'm not really planning to use that anymore.
    with pm.Model(verbose=verbose) as model:
        
        # Red tide effect factors, expressed as multiples of the SW baseline
        # calf mortality: `a` for moderately unusual red tide years,
        # `b` for intense ones.
        a_mort = pm.Uniform('a_mort', lower=0, upper=1-calf_mort_base[SW])
        a = a_mort / calf_mort_base[SW]
        
        b_mort = pm.Uniform('b_mort', lower=0, upper=1-calf_mort_base[SW])
        b = b_mort / calf_mort_base[SW]
        
        # Winter effect factors for each habitat type
        # (rows: LOW/MED/HIGH habitat, columns: region)
        sev_mort = pm.Uniform('sev_mort', lower=0, upper=1-max(calf_mort_base), 
                              shape=(3,4))
        
        # Cold and normal-winter effects are pinned to ~0 for HIGH-quality
        # habitat via a tiny upper bound on the HIGH row.
        cold_mort_upper = np.resize(1-max(calf_mort_base), (3,4))
        cold_mort_upper[HIGH] = 0.0001
        cold_mort = pm.Uniform('cold_mort', lower=0, upper=cold_mort_upper, shape=(3,4))
        
        norm_mort_upper = np.resize(1-max(calf_mort_base), (3,4))
        norm_mort_upper[HIGH] = 0.0001
        norm_mort = pm.Uniform('norm_mort', lower=0, upper=norm_mort_upper, shape=(3,4))
        
        # Convert the winter mortality increments to multiples of each
        # region's baseline calf mortality.
        sev = sev_mort / np.resize(calf_mort_base, (3,4))
        cold = cold_mort / np.resize(calf_mort_base, (3,4))
        norm = norm_mort / np.resize(calf_mort_base, (3,4))
                
        # Baseline proportion vectors. p[area] drives the reallocation of
        # undetermined carcasses (via η); π[area] drives the cause mix of the
        # total deaths (via θ).  Note: the comprehension variable `a` is local
        # in Python 3 and does not clobber the red-tide factor `a` above.
        p = [pm.Dirichlet('p_{}'.format(a), t) for t,a in zip(prior, regions)]
        
        π = [pm.Dirichlet('π_{}'.format(a), np.ones(k)) for k,a in zip(ncauses, regions)]
        
        # NOTE(review): the loop rebinds the name `data`, shadowing the
        # argument.  Safe because the groupby iterator is created before the
        # first rebinding, but fragile — consider renaming the loop variable.
        for (area, year, habitat), data in data.groupby(['area', 'year', 'habitat']):
            
            undetermined = data.undetermined.values

            # Determined-cause counts.  USJ drops the red tide column so Z
            # aligns with its shorter proportion vectors (ncauses[USJ]).
            Z = (data[causes].drop('redtide', axis=1).values if area==USJ else data[causes].values).squeeze()
            
            # In the adjustments below, index 4 is the red tide cause and
            # index 3 is cold (see the `causes` list).  Each set_subtensor
            # adds an effect to one component and the final division
            # renormalizes the vector to sum to 1.
            
            # Only moderately unusual red tide (no cold effects)
            if ((year in tide_years0) and (area==SW) and (habitat==HIGH)):
                
                # theta varies from pi based on red tide UME 
                θ = tt.set_subtensor(π[area][4], π[area][4] + a) / (1+a) 
                η = p[area]

            # Moderately unusual red tide and normal cold
            elif ((year in tide_years0) and (area==SW)):
                
                no = norm[habitat, area]

                # theta varies from pi based on winter severity and on red tide UME 
                θ = tt.set_subtensor(π[area][4], π[area][4] + a) 
                θ = tt.set_subtensor(θ[3], θ[3] + no) / (1 + a + no) 

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + no) / (1 + no)


            # Intense unusual red tide and cold year
            # NOTE(review): unlike the moderate-tide case above, this branch
            # is checked before the HIGH-habitat branch, so 1996/SW/HIGH takes
            # the cold effect — largely moot since cold_mort is pinned ~0 for
            # HIGH habitat, but confirm the ordering is intended.
            elif ((year in tide_years_intense) and (area==SW) and (year in cold_years[area])):
                
                co = cold[habitat, area]
                
                # theta varies from pi based on winter severity and on red tide UME 
                θ = tt.set_subtensor(π[area][4], π[area][4] + b) 
                θ = tt.set_subtensor(θ[3], θ[3] + co) / (1 + b + co) 

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + co) / (1 + co)


            # Intense unusual red tide and severe cold year
            # NOTE(review): with the year sets defined above
            # (tide_years_intense = 1996/2013, severe_years[SW] = {2010})
            # this branch can never fire — dead code under current constants.
            elif ((year in tide_years_intense) and (area==SW) and (year in severe_years[area])):

                sv = sev[habitat, area]
                
                # theta varies from pi based on red tide UME 
                θ = tt.set_subtensor(π[area][4], π[area][4] + b) 
                θ = tt.set_subtensor(θ[3], θ[3] + sv) / (1 + b + sv) 

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + sv) / (1 + sv)


            # Intense unusual red tide only
            elif ((year in tide_years_intense) and (area==SW) and (habitat==HIGH)):
                
                # theta varies from pi based on red tide UME 
                θ = tt.set_subtensor(π[area][4], π[area][4] + b) / (1+b) 
                
                η = p[area]

            # Intense unusual red tide and normal cold
            elif (year in tide_years_intense) and (area==SW):
                
                no = norm[habitat, area]

                # theta varies from pi based on winter severity and on red tide UME 
                θ = tt.set_subtensor(π[area][4], π[area][4] + b) 
                θ = tt.set_subtensor(θ[3], θ[3] + no) / (1 + b + no) 

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + no) / (1 + no)
                

            # Severe cold year
            elif (year in severe_years[area]):
                
                
                sv = sev[habitat, area]
                
                # theta varies from pi based on winter severity 
                θ = tt.set_subtensor(π[area][3], π[area][3] + sv) / (1 + sv)

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + sv) / (1 + sv)
                

            # Cold year, low or medium habitat quality
            elif (year in cold_years[area]) and (habitat<HIGH):
                
                
                co = cold[habitat, area]
                                
                # theta varies from pi based on winter severity 
                θ = tt.set_subtensor(π[area][3], π[area][3] + co) / (1 + co)

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + co) / (1 + co)
                

            # Normal cold year, low or medium habitat quality
            elif (habitat<HIGH):
                
                
                no = norm[habitat, area]

                # theta varies from pi based on winter severity 
                θ = tt.set_subtensor(π[area][3], π[area][3] + no) / (1 + no) 

                # eta varies from p based on winter severity
                η = tt.set_subtensor(p[area][3], p[area][3] + no) / (1 + no)
                

            # Normal year, high habitat quality
            else:             
                # No adjustment to proportions
                θ = π[area]
                η = p[area]
             
            # NOTE(review): leftover debug output — remove for production runs.
            tt.printing.Print('θ')(θ)
            # NOTE(review): this relies on the truth value of a numpy array;
            # fine for a single-element array, but raises ValueError if a
            # group ever has more than one row — TODO confirm the assumption.
            if undetermined:
#                 U = pm.Multinomial('U_{}_{}_{}'.format(area, year, habitat), n=undetermined, p=η, shape=ncauses[area])
                # NOTE(review): srng.multinomial draws are NOT PyMC3 model
                # variables, yet η's graph references free model variables
                # (e.g. cold_mort) — presumably the cause of the
                # MissingInputError in the sampling cell below.  The
                # commented-out pm.Multinomial above is the in-model
                # alternative.
                U = srng.multinomial(n=undetermined, pvals=η).squeeze()
            
                deaths = Z + U
                
            else:
                
                deaths = Z
            
            # Groups with no determined and no undetermined deaths contribute
            # no likelihood term.
            if Z.sum() or undetermined:
            
                # Multinomial log-likelihood of the combined determined +
                # reallocated-undetermined counts under proportions θ.
                pm.Potential('X_like_{}_{}_{}'.format(area, year, habitat), 
                                 pm.Multinomial.dist(n=deaths.sum(), p=θ).logp(deaths))
            
        
    return(model)
In [25]:
calf_model = mortality_model(data=analysis_subset, prior=prior0, verbose=1)
Applied interval-transform to a_mort and added transformed a_mort_interval to model.
Applied interval-transform to b_mort and added transformed b_mort_interval to model.
Applied interval-transform to sev_mort and added transformed sev_mort_interval to model.
Applied interval-transform to cold_mort and added transformed cold_mort_interval to model.
Applied interval-transform to norm_mort and added transformed norm_mort_interval to model.
Applied stickbreaking-transform to p_ATL and added transformed p_ATL_stickbreaking to model.
Applied stickbreaking-transform to p_USJ and added transformed p_USJ_stickbreaking to model.
Applied stickbreaking-transform to p_NW and added transformed p_NW_stickbreaking to model.
Applied stickbreaking-transform to p_SW and added transformed p_SW_stickbreaking to model.
Applied stickbreaking-transform to π_ATL and added transformed π_ATL_stickbreaking to model.
Applied stickbreaking-transform to π_USJ and added transformed π_USJ_stickbreaking to model.
Applied stickbreaking-transform to π_NW and added transformed π_NW_stickbreaking to model.
Applied stickbreaking-transform to π_SW and added transformed π_SW_stickbreaking to model.
In [26]:
with calf_model:
    
    # NOTE(review): this raises MissingInputError (see the traceback below).
    # The η fed to srng.multinomial inside mortality_model depends on free
    # model variables (the error names cold_mort_interval_), but those draws
    # live outside the compiled PyMC3/Theano graph — presumably replacing the
    # srng draw with the commented-out pm.Multinomial latent variable would
    # fix this; confirm.
    trace = pm.sample(5000, step=pm.Metropolis())
    #means, sds, elbo = pm.variational.advi(n=50000)
---------------------------------------------------------------------------
MissingInputError                         Traceback (most recent call last)
<ipython-input-26-4cf7dc9675fe> in <module>()
      1 with calf_model:
      2 
----> 3     trace = pm.sample(5000, step=pm.Metropolis())
      4     #means, sds, elbo = pm.variational.advi(n=50000)

/Users/fonnescj/Repositories/pymc3/pymc3/step_methods/arraystep.py in __new__(cls, *args, **kwargs)
     56                 # If we don't return the instance we have to manually
     57                 # call __init__
---> 58                 step.__init__([var], *args, **kwargs)
     59                 # Hack for creating the class correctly when unpickling.
     60                 step.__newargs = ([var], ) + args, kwargs

/Users/fonnescj/Repositories/pymc3/pymc3/step_methods/metropolis.py in __init__(self, vars, S, proposal_dist, scaling, tune, tune_interval, model, **kwargs)
     99 
    100         shared = make_shared_replacements(vars, model)
--> 101         self.delta_logp = delta_logp(model.logpt, vars, shared)
    102         super(Metropolis, self).__init__(vars, shared)
    103 

/Users/fonnescj/Repositories/pymc3/pymc3/step_methods/metropolis.py in delta_logp(logp, vars, shared)
    294     logp1 = CallableTensor(logp0)(inarray1)
    295 
--> 296     f = theano.function([inarray1, inarray0], logp1 - logp0)
    297     f.trust_input = True
    298     return f

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/compile/function.py in function(inputs, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input)
    320                    on_unused_input=on_unused_input,
    321                    profile=profile,
--> 322                    output_keys=output_keys)
    323     # We need to add the flag check_aliased inputs if we have any mutable or
    324     # borrowed used defined inputs

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/compile/pfunc.py in pfunc(params, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input, output_keys)
    478                          accept_inplace=accept_inplace, name=name,
    479                          profile=profile, on_unused_input=on_unused_input,
--> 480                          output_keys=output_keys)
    481 
    482 

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/compile/function_module.py in orig_function(inputs, outputs, mode, accept_inplace, name, profile, on_unused_input, output_keys)
   1821                    profile=profile,
   1822                    on_unused_input=on_unused_input,
-> 1823                    output_keys=output_keys).create(
   1824             defaults)
   1825 

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/compile/function_module.py in __init__(self, inputs, outputs, mode, accept_inplace, function_builder, profile, on_unused_input, fgraph, output_keys)
   1473             # OUTPUT VARIABLES)
   1474             fgraph, additional_outputs = std_fgraph(inputs, outputs,
-> 1475                                                     accept_inplace)
   1476             fgraph.profile = profile
   1477         else:

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/compile/function_module.py in std_fgraph(input_specs, output_specs, accept_inplace)
    175 
    176     fgraph = gof.fg.FunctionGraph(orig_inputs, orig_outputs,
--> 177                                   update_mapping=update_mapping)
    178 
    179     for node in fgraph.apply_nodes:

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/gof/fg.py in __init__(self, inputs, outputs, features, clone, update_mapping)
    180 
    181         for output in outputs:
--> 182             self.__import_r__(output, reason="init")
    183         for i, output in enumerate(outputs):
    184             output.clients.append(('output', i))

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/gof/fg.py in __import_r__(self, variable, reason)
    369         # Imports the owners of the variables
    370         if variable.owner and variable.owner not in self.apply_nodes:
--> 371                 self.__import__(variable.owner, reason=reason)
    372         if (variable.owner is None and
    373                 not isinstance(variable, graph.Constant) and

/Users/fonnescj/anaconda3/lib/python3.5/site-packages/theano/gof/fg.py in __import__(self, apply_node, check, reason)
    411                             "for more information on this error."
    412                             % str(node)),
--> 413                             variable=r)
    414 
    415         for node in new_nodes:

MissingInputError: An input of the graph, used to compute Elemwise{exp,no_inplace}(cold_mort_interval_), was not provided and not given a value.Use the Theano flag exception_verbosity='high',for more information on this error.