Given the model:
$$ \begin{align} y_t & = w' x_{t-1} + \varepsilon_t \\ x_t & = F x_{t-1} + g \varepsilon_t \end{align} $$
with $\varepsilon_t \sim N(0, \sigma_\varepsilon^2)$, we can rewrite it as:
$$ \begin{align} y_t & = \begin{bmatrix} w' & 1 \end{bmatrix} \begin{bmatrix} x_{t-1} \\ \varepsilon_t \end{bmatrix} \\ \begin{bmatrix} x_{t} \\ \varepsilon_{t+1} \end{bmatrix} & = \begin{bmatrix} F & g \\ 0 & 0 \end{bmatrix} \begin{bmatrix} x_{t-1} \\ \varepsilon_{t} \end{bmatrix} + \begin{bmatrix} 0 \\ 1 \end{bmatrix} \varepsilon_{t+1} \end{align} $$
Below we consider the special case of the local level model, ETS(A,N,N), which has $w = F = 1$, i.e.:
$$ \begin{align} y_t & = x_{t-1} + \varepsilon_t \\ x_t & = x_{t-1} + g \varepsilon_t \end{align} $$

%matplotlib inline
import numpy as np
import pandas as pd
import statsmodels.api as sm
import matplotlib.pyplot as plt
# Simulate from the local level model:
#   y_t = x_{t-1} + eps_t,   x_t = x_{t-1} + g * eps_t
np.random.seed(1234)

nobs = 1000
w = 1    # observation loading (local level: w = 1)
F = 1    # state transition coefficient (local level: F = 1)
g = 0.5  # innovation loading

# Draw all disturbances up front so the recursion is deterministic.
e = np.random.normal(scale=1, size=nobs + 1)
x = np.zeros(nobs + 1)
y = np.zeros(nobs)

# Recursion: e[t] drives both the observation at t and the state update.
for t, eps in enumerate(e[1:], start=1):
    x[t] = F * x[t - 1] + g * eps
    y[t - 1] = w * x[t - 1] + eps

# Vectorized equivalent of the observation equation (sanity check vs. y).
y2 = w * x[:-1] + e[1:]
class Test(sm.tsa.statespace.MLEModel):
    """Local level model in innovations state-space form.

    The state vector is [x_{t-1}, eps_t]', so the design row is [1, 1],
    the transition matrix is [[1, g], [0, 0]], and the single shock loads
    only on the second state.
    """

    _param_names = ['g', 'sigma_eps']

    def __init__(self, endog):
        super(Test, self).__init__(
            endog, k_states=2, k_posdef=1,
            initialization='approximate_diffuse')
        # Time-invariant pieces of the system matrices; the entries that
        # depend on the parameters are filled in by `update`.
        self['design', 0, :] = 1
        self['transition', 0, 0] = 1
        self['selection', 1, 0] = 1

    @property
    def start_params(self):
        # Starting values for [g, sigma_eps].
        return [0, 1.]

    def transform_params(self, unconstrained):
        # Square the standard deviation so the optimizer can search over
        # the whole real line while the variance stays non-negative.
        params = unconstrained.copy()
        params[-1] = unconstrained[-1]**2
        return params

    def untransform_params(self, constrained):
        # Inverse of `transform_params`.
        params = constrained.copy()
        params[-1] = constrained[-1]**0.5
        return params

    def update(self, params, **kwargs):
        # Base-class `update` applies the parameter transformation.
        g, sigma2_eps = super(Test, self).update(params, **kwargs)
        self['transition', 0, 1] = g
        self['state_cov', 0, 0] = sigma2_eps
# Fit the model by maximum likelihood and report the results.
mod = Test(y)
res = mod.fit(method='powell', maxiter=1000)
# Bug fix: `print res.summary()` is a Python-2 print statement and a
# syntax error under Python 3; the call form works under both versions.
print(res.summary())
Optimization terminated successfully. Current function value: 1.403486 Iterations: 2 Function evaluations: 62 Statespace Model Results ============================================================================== Dep. Variable: y No. Observations: 1000 Model: Test Log Likelihood -1403.486 Date: Wed, 17 Feb 2016 AIC 2810.971 Time: 15:58:55 BIC 2820.787 Sample: 0 HQIC 2814.702 - 1000 Covariance Type: opg ============================================================================== coef std err z P>|z| [0.025 0.975] ------------------------------------------------------------------------------ g 0.4547 0.026 17.482 0.000 0.404 0.506 sigma_eps 0.9457 0.042 22.371 0.000 0.863 1.029 =================================================================================== Ljung-Box (Q): 25.01 Jarque-Bera (JB): 0.12 Prob(Q): 0.97 Prob(JB): 0.94 Heteroskedasticity (H): 1.06 Skew: -0.03 Prob(H) (two-sided): 0.60 Kurtosis: 3.02 =================================================================================== Warnings: [1] Covariance matrix calculated using the outer product of gradients.
# Plot the simulated level and disturbance against their smoothed estimates.
fig, axes = plt.subplots(2, figsize=(13, 6))
level_ax, disturbance_ax = axes

# Top panel: true state path x_t vs. the smoothed first state.
level_ax.plot(x[:-1], label='True')
level_ax.plot(res.smoothed_state[0], label='Est.')
level_ax.legend(loc='upper left')
level_ax.set(title=r'True vs. Estimated level $(x_t)$');

# Bottom panel: true shocks e_t vs. the smoothed second state.
disturbance_ax.plot(e[1:], label='True')
disturbance_ax.plot(res.smoothed_state[1], label='Est.')
disturbance_ax.legend(loc='upper left')
disturbance_ax.set(title='True vs. Estimated Disturbance $(e_t)$');