Based on a mix of https://github.com/dfm/george/blob/master/docs/_code/model.py and http://dan.iel.fm/george/current/user/hyper/,
adapted by Kyle on Aug 14, 2015 to optimize a model that includes both a GP and a signal component.
%pylab inline --no-import-all
Populating the interactive namespace from numpy and matplotlib
from __future__ import division, print_function
import emcee
#import triangle
import triangle_plot as triangle
import numpy as np
import matplotlib.pyplot as pl
import george
from george import kernels
import scipy.optimize as op
def model(params, t):
    """Gaussian bump signal: amplitude * exp(-0.5 * (t - center)**2 / variance).

    params: sequence (amplitude, center, variance); t: scalar or ndarray of times.
    """
    amplitude, center, variance = params
    return amplitude * np.exp(-0.5 * (t - center) ** 2 / variance)
def generate_data(params, N, rng=(-5, 5)):
    """Simulate N noisy observations: a GP draw plus the Gaussian-bump signal.

    Returns (t, y, yerr) with t sorted uniformly in `rng` and
    heteroscedastic errors in [0.05, 0.10).
    """
    gp = george.GP(0.1 * kernels.ExpSquaredKernel(3.3))
    t = rng[0] + np.diff(rng) * np.sort(np.random.rand(N))
    y = gp.sample(t)
    y += model(params, t)
    yerr = 0.05 + 0.05 * np.random.rand(N)
    noisy = y + yerr * np.random.randn(N)
    return t, noisy, yerr
# Not used in this script's workflow; the nll closure inside runToy is used instead.
def lnlike_gp(p, t, y, yerr):
    """GP marginal ln-likelihood of the signal-subtracted data.

    p[:2] are (ln amplitude, ln timescale) of a Matern-3/2 kernel;
    p[2:] are the bump-model parameters.
    """
    amp, tau = np.exp(p[:2])
    gp = george.GP(amp * kernels.Matern32Kernel(tau))
    gp.compute(t, yerr)
    residual = y - model(p[2:], t)
    return gp.lnlikelihood(residual)
# Not used here b/c we aren't running emcee.
def lnprior_gp(p):
    """Flat prior: both log hyperparameters must lie in (-5, 5); then defer
    to lnprior_base (defined elsewhere in this file) for the model params."""
    lna, lntau = p[:2]
    if -5 < lna < 5 and -5 < lntau < 5:
        return lnprior_base(p[2:])
    return -np.inf
# Not used here b/c we aren't running emcee.
def lnprob_gp(p, t, y, yerr):
    """Ln-posterior: prior plus likelihood, or -inf outside the prior support."""
    prior = lnprior_gp(p)
    if np.isfinite(prior):
        return prior + lnlike_gp(p, t, y, yerr)
    return -np.inf
# Not used here b/c we aren't running emcee.
def fit_gp(initial, data, nwalkers=32):
    """Sample the GP+signal posterior with emcee.

    Two burn-in phases (the second re-seeds all walkers in a tight ball
    around the best point found so far) followed by a production run.

    Parameters: initial — starting parameter vector; data — (t, y, yerr)
    tuple passed through to lnprob_gp; nwalkers — ensemble size.
    Returns the EnsembleSampler holding the production chain.
    """
    ndim = len(initial)
    # range (not the Python-2-only xrange) — consistent with the
    # __future__ imports that target Python 3 compatibility.
    p0 = [np.array(initial) + 1e-8 * np.random.randn(ndim)
          for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob_gp, args=data)
    print("Running burn-in")
    p0, lnp, _ = sampler.run_mcmc(p0, 500)
    sampler.reset()
    print("Running second burn-in")
    # Restart all walkers near the highest-probability position seen so far.
    p = p0[np.argmax(lnp)]
    p0 = [p + 1e-8 * np.random.randn(ndim) for i in range(nwalkers)]
    p0, _, _ = sampler.run_mcmc(p0, 500)
    sampler.reset()
    print("Running production")
    p0, _, _ = sampler.run_mcmc(p0, 5000)
    return sampler
def runToy(plots=False):
    """Generate one toy dataset and fit GP hyperparameters + bump model jointly
    by minimizing the negative log-likelihood with scipy.optimize.

    Returns the fitted bump amplitude (p[2]); optionally saves a plot of the
    data, GP prediction, signal model, and their sum to gp-bestfit.png.
    """
    truth = [1., 0.1, 0.4]  # changed amplitude to +1 for HEP not exoplanets
    t, y, yerr = generate_data(truth, 50)
    # Initial guess: log-hyperparameters at 0 (i.e. amp=tau=1), model at truth.
    p = [0.0, 0.0] + truth
    a, tau = np.exp(p[:2])
    gp = george.GP(a * kernels.Matern32Kernel(tau))
    gp.compute(t, yerr)
    # Define the objective function (negative log-likelihood in this case).
    # NOTE: nll closes over `gp` and mutates its kernel in place each call.
    def nll(p):
        # The scipy optimizer doesn't play well with infinities.
        # Update the kernel parameters and compute the likelihood.
        gp.kernel[:] = p[:2]
        ll = gp.lnlikelihood(y - model(p[2:],t), quiet=True)
        #ll = gp.lnlikelihood(y , quiet=True)
        return -ll if np.isfinite(ll) else 1e25
    # And the gradient of the objective function.
    # We won't use this either: the gradient of the signal part is not known.
    def grad_nll(p):
        # Update the kernel parameters and compute the likelihood.
        gp.kernel[:] = p[:2]
        return -gp.grad_lnlikelihood(y - model(p[2:],t), quiet=True)
        #return -gp.grad_lnlikelihood(y , quiet=True)
    # Print the initial ln-likelihood.
    print(gp.lnlikelihood(y - model(p[2:],t)))
    # Run the optimization routine.
    #results = op.minimize(nll, p, jac=grad_nll)
    # don't know gradient for other part of model
    results = op.minimize(nll, p)
    print(results.x)
    p = results.x
    # Update the kernel and print the final log-likelihood.
    gp.kernel[:] = results.x[:2]
    print(gp.lnlikelihood(y - model(p[2:],t)))
    if plots:
        # Plot the samples in data space.
        print("Making plots")
        x = np.linspace(-5, 5, 500)
        pl.figure()
        pl.errorbar(t, y, yerr=yerr, fmt=".k", capsize=0)
        # GP prediction conditioned on the signal-subtracted data.
        m, cov = gp.predict(y-model(p[2:],t), x)
        pl.plot(x, m, color="#4682b4", alpha=0.3)
        pl.plot(x, model(p[2:], x), color="r", alpha=0.3)
        pl.plot(x, m + model(p[2:], x), color="g", alpha=0.3)
        pl.ylabel(r"$y$")
        pl.xlabel(r"$t$")
        pl.xlim(-5, 5)
        pl.title("results with Gaussian process noise model")
        pl.savefig("gp-bestfit.png")
    # Report and return the fitted signal amplitude.
    print( p[2])
    return p[2]
runToy(True)
15.41331803 [-1.51625279 3.95584627 0.80565782 0.05470704 0.39135082] 47.9155631921 Making plots 0.805657822905
0.80565782290530052
# Monte-Carlo study: repeat the toy fit 100 times, collecting the fitted
# amplitude from each successful run.
ampArray = []
for i in range(100):
    try:
        ampArray.append(runToy(False))
    except ValueError:
        # Occasional failures (e.g. optimizer/linear-algebra errors surfacing
        # as ValueError) are skipped rather than aborting the study.
        # Fixed typo in the message: "didnt' work" -> "didn't work".
        print("didn't work")
10.5130329837 [-1.88152124 3.11820141 1.18211775 0.08629853 0.38843503] 39.7604591178 1.18211774686 9.26494351615 [-2.89093218 4.88919371 0.89003035 0.13527701 0.39970359] 47.6048965046 0.890030354939 12.9376133281 [-3.31906483 1.58281901 0.6433711 0.10227122 0.23400752] 44.8787107836 0.64337110044 14.1489237355 [-2.03314038 4.06745276 0.88600515 0.0865865 0.2968777 ] 45.8597827147 0.88600514811 14.9117800399 [-3.95413819 0.97107048 1.06785868 0.14287222 0.40103592] 47.8622676902 1.06785867871 16.8560203739 [-2.09708453 2.72838606 0.97549295 0.10134833 0.45994461] 48.0306003749 0.975492952271 10.9393361438 [-2.29455159 3.42523137 0.7702139 0.18327781 0.34659198] 40.5735842994 0.770213903033 11.2156938342 [-1.81096849 2.46457994 0.89795414 0.17097312 0.30310444] 40.5842214333 0.897954140168 17.5494779228 [-0.42598877 3.60127551 1.09983507 0.1166437 0.45332901] 48.9212581736 1.09983506812 15.0006538406 [-2.50281691 4.34024394 1.05027303 0.18673767 0.3297566 ] 47.4934650835 1.05027303346 7.50887607391 [-2.64251628 1.52173966 0.79122487 0.02142701 0.37207166] 31.2765130245 0.791224869335 16.2462735326 [ -3.07312949e+00 1.38495825e+00 1.27307178e+00 2.03918480e-03 5.35120622e-01] 44.1539038314 1.27307178446 15.6640021794 [-2.36377687 4.76895925 1.00545653 0.15149833 0.39727537] 46.7218673667 1.00545652653 18.0123480008 [-1.61370876 3.26940439 1.208133 0.13418382 0.39555256] 46.8050597776 1.20813299504 17.9724854721 [-2.57334913 1.61418052 0.78846664 0.04361952 0.28590148] 42.6386967854 0.788466639834 17.4650522901 [-1.68104778 3.15990226 1.04177189 0.01561766 0.38720677] 46.0724996233 1.04177188867 15.4215457124 [-1.9757408 2.15256756 1.2306642 0.13431879 0.48192065] 35.5298509639 1.23066420262 20.4040541462 didnt' work 15.9942810804 [-0.40029845 4.00520451 1.30734773 0.03101219 0.61239882] 48.4536620214 1.30734773287 19.7585163663 [-2.18716302 1.89424435 0.96695597 0.17112119 0.54223984] 44.4855193165 0.966955974843 19.1361869202 didnt' work 16.4917669779 [-0.79090529 
3.48432572 1.14651207 0.15052579 0.4057842 ] 39.8646448973 1.14651206835 20.5029011498 [-1.90456171 3.9496788 1.05593315 0.09194221 0.38146054] 54.3375215489 1.05593314601 17.6327129823 [-2.76972409 3.09427981 1.21970511 -0.00561511 0.4849346 ] 49.660283607 1.21970511482 16.2221903928 [-2.41525779 2.38344055 0.91907845 0.06260115 0.37372593] 45.0304483933 0.919078449545 15.9500468144 [-2.37634186 2.40764076 0.98798901 0.12623913 0.48722087] 44.0919797118 0.987989012864 17.4981651489 [-2.75270288 3.49252402 1.02601425 0.1189846 0.40729756] 55.6753836374 1.02601425355 14.8702704563 [-2.09641312 3.63167026 0.95672968 0.05086181 0.40204176] 47.1439370535 0.956729684673 18.4468749821 [-1.32826891 3.32443246 0.93750326 0.18831019 0.30649306] 44.9947277036 0.937503260356 18.598981673 [-2.70812902 2.64480965 1.08426574 -0.01070898 0.45953638] 49.350151548 1.08426574062 13.2995884271 [-2.37536264 2.61744955 1.20740396 0.02764622 0.53833449] 43.3746065344 1.20740396348 17.6076623809 [-1.81520757 3.26163674 1.00214438 0.09883254 0.4250204 ] 47.3840707703 1.00214437941 19.6134675245 [-3.12146418 1.83551108 1.09321279 0.05724046 0.46495714] 51.680722736 1.09321278751 15.4340040109 [-3.76632441 1.22427032 0.80273661 0.16611293 0.43944372] 43.6490335363 0.802736610125 14.4925414235 [-2.99021062 1.56902287 1.02196143 0.29262355 0.19509729] 38.1957866517 1.02196142665 20.6784753292 [-2.89552398 1.46505407 0.87031631 0.15599494 0.41476334] 45.2754551639 0.870316309638 18.1374601787 [-2.07738115 2.52034277 0.93566755 0.15265382 0.39758077] 45.7458609881 0.935667552236 15.4767370564 [-1.95700464 2.70620168 0.96030173 0.1271678 0.30387325]
/Library/Python/2.7/site-packages/IPython/kernel/__main__.py:3: RuntimeWarning: overflow encountered in exp app.launch_new_instance() /System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/scipy/optimize/linesearch.py:422: RuntimeWarning: invalid value encountered in double_scalars radical = B * B - 3 * A * C
43.3385564384 0.960301732078 16.3153660746 [-2.46453958 2.19176277 1.09786422 0.00725849 0.43707478] 44.7994000384 1.0978642174 13.5946922614 [-3.27927878 1.74731378 0.97731072 0.05959335 0.31702441] 45.3943206646 0.977310724779 16.3186571743 [-1.69374013 2.91916476 1.09807618 0.16863006 0.41220003] 42.7667710872 1.09807617834 14.6490941543 [-1.02773538 0.88835631 1.00478036 0.12670817 0.36502807] 30.9681931205 1.00478035979 13.3038738514 [-2.77195588 2.86728326 1.12689946 0.16442857 0.65612676] 42.8266459585 1.12689946047 15.8072836618 [-1.72834786 3.31255398 1.13070189 0.08517504 0.33864718] 47.6113890086 1.13070189415 16.9070536187 [-3.94320677 0.94144838 1.34819054 0.20134796 0.63722943] 49.48069514 1.34819053877 14.947443991 [-1.75790849 4.21623964 0.97301907 0.03279656 0.38640311] 52.0810212383 0.973019071588 14.1780928431 [-3.15516153 1.78213242 1.00417656 0.01595351 0.332391 ] 44.8550174758 1.00417656348 17.7803467482 [-2.08161091 3.19020362 0.98690653 0.13970777 0.37576995] 51.238786405 0.98690653258 12.9226649254 [-2.21004649 2.9081506 0.88682768 0.17311945 0.37024171] 45.9593685434 0.886827677687 15.6340046212 [-2.00874439 2.79911572 1.02405777 0.11399997 0.45336377] 46.8510548205 1.02405777353 15.665474515 [-2.76586151 3.05478482 0.76041781 0.12616592 0.40330353] 51.3694348105 0.760417812476 19.547729271 [-4.38920852 2.76407933 1.04636453 0.01364809 0.41625034] 51.9448172627 1.04636452783 12.3466884419 [ -1.76580038e+00 3.02789103e+00 1.14764400e+00 1.72582659e-03 4.91620168e-01] 33.8666173157 1.14764400289 19.235542788 [-2.63167062 1.63850539 1.25117494 0.11055845 0.43252332] 43.504667308 1.25117494453 17.6592428686 didnt' work 15.866204327 [-2.43304251 1.18103026 -0.69557309 1.66840291 0.47609223] 37.3907538113 -0.695573094516 21.191219478 [ -2.04788186e+00 4.05132731e+00 1.50633681e+00 -3.02942065e-03 6.23673880e-01] 55.2995331847 1.50633680846 21.2475450349 [-2.38104849 2.44572509 1.27206234 0.06208141 0.47922576] 50.0961067852 1.27206234146 
14.2833919871 [-2.41197075 2.57387245 1.29442524 0.13542071 0.47880761] 42.7590736164 1.29442523885 18.7428950563 [-2.97455351 1.95543794 0.85208195 0.16061142 0.33245891] 46.8665118958 0.852081947985 16.94482128 [ -4.68920254 20.15898254 1.02852952 0.03058887 0.23072255] 28.1237589751 1.02852952048 8.22823726025 [-0.92978461 3.39484755 1.11297581 0.08361134 0.36523356] 33.5369063192 1.11297580989 16.4209859333 [-2.04864263 2.63601658 0.83105098 0.07369726 0.36765924] 42.5034894607 0.831050978588 18.5054937622 [-2.98105843 5.5256864 0.87992397 0.06933491 0.41625179] 53.4469170543 0.879923967439 17.4798070146 [-3.20895008 1.76688038 0.92338323 0.17603546 0.29329952] 47.349435366 0.923383225684 12.9925425998 [-3.0720572 1.34053204 1.13615703 0.05504447 0.41771403] 40.2178825887 1.13615702671 18.0590868097 [-1.66959103 3.4903732 1.05393098 0.06266667 0.43425215] 47.5019118664 1.05393097665 23.8786658937 [-2.89041584 2.26961261 0.91429725 0.026129 0.35314479] 53.7632743899 0.914297254031 13.3843803742 [-1.45297512 4.00360741 1.40840728 0.09235014 0.48980754] 47.8853708663 1.40840727874 17.7564760601 [-3.00042306 2.11911419 0.94619474 0.11771719 0.36162591] 45.7520484501 0.946194736613 16.2232365402 [-2.02766849 2.97332963 0.81471508 0.07247609 0.34299542] 44.8192699448 0.814715075176 15.762709567 [-2.27391591 3.39340338 1.16600923 0.1511587 0.50085341] 48.8246256743 1.16600922997 14.7328268314 [-3.49744987 1.65800571 1.00162174 0.07841259 0.39726179] 43.3095807872 1.001621741 15.8763104991 [-2.77243258 2.09596287 1.20973554 0.13624731 0.47198946] 47.6298531501 1.20973554015 17.6955890194 [-1.4223119 3.86561573 0.94560422 0.19144015 0.33680283] 53.404654902 0.945604218163 17.1832985177 [-2.50474782 2.68054421 0.99857382 0.12073886 0.33465955] 49.0264937097 0.998573819945 14.3950230755 [-2.77671365 3.60770368 1.37407544 0.00643752 0.52859814] 40.3719019904 1.37407544182 21.2538099445 [-1.82453813 7.55211171 1.18428989 0.09512144 0.60743101] 54.2609098923 1.18428988606 
16.1002978036 [-2.53700288 2.63601683 1.03227864 0.07290974 0.4659636 ] 47.4353365803 1.0322786432 17.9898870361 [-2.99376259 3.96117154 1.11003672 0.10465087 0.44692979] 53.8426570077 1.1100367241 16.7635208955 [-4.17622018 -0.6318599 1.51037609 0.2230749 0.8006491 ] 44.6613939378 1.51037608855 18.6979852705 [-2.76277378 1.8194911 1.26055235 0.29166197 0.75015291] 46.8527597324 1.26055234963 12.1363743453 [-1.43518409 3.76593866 1.48264631 0.05471907 0.64918992] 42.3810538318 1.48264631218 14.6620348343 [-1.60843603 3.32178331 1.07215059 0.11136854 0.33747885] 44.891372329 1.07215058522 20.0055337729 [-3.31647012 2.69735567 0.83870296 0.21060783 0.31430147] 55.0845281697 0.838702962938 12.049389649 [-2.48023578 4.32292195 1.14627218 0.13107656 0.56880273] 48.2496752832 1.14627218333 19.6300601231 [-2.53657121 3.04699929 0.75484477 0.05046606 0.30938218] 53.7546896072 0.754844774886 20.0970273301 [-1.65800784 3.23237041 0.81237716 0.05781758 0.32517475] 50.8410429122 0.812377164806 17.9825063282 [-0.04309149 4.98525703 0.95746195 0.06187881 0.31014457] 50.83643655 0.957461953332 19.0424728451 [-2.93140217 3.78375042 0.7975963 0.17657646 0.38825592] 47.9515383267 0.797596299048 15.8951381263 [-2.0667258 3.51190226 0.95658072 0.17070022 0.45978287] 45.8295432722 0.956580718038 18.5281733101 [-1.89693599 2.33707025 1.11545239 0.14000596 0.33558084] 45.0589467154 1.11545238805 16.9875708086 [-2.84347591 3.76032061 1.04551582 0.1012127 0.38741414] 55.9975556872 1.04551581691 16.1090079538 [-4.06861514 1.09318096 0.89497263 0.08650664 0.46198827] 44.8358210203 0.894972634211 19.4819704024 [-2.6801679 1.95351174 0.9078591 0.10333826 0.27216602] 49.093669613 0.907859101689 19.2873329582 [-2.12040185 2.28756389 1.11629846 0.03703377 0.4844019 ] 42.9864714125 1.11629846022 23.7242289286 [-1.59805546 3.55262392 1.07487121 0.1101012 0.47736249] 54.3221893671 1.07487120554 20.5211056458 [-0.41883564 0.37842434 0.99284268 0.10454719 0.38092434] 29.0888407 0.992842680418 
17.5627077898 [-2.22330776 2.1028349 0.94934709 0.08846255 0.27604143] 42.2364155915 0.949347091385 15.7447588574 [-1.89311744 2.98377358 1.12401143 0.06209512 0.43982846] 47.4868993755 1.12401143011
/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/scipy/optimize/linesearch.py:427: RuntimeWarning: overflow encountered in double_scalars xmin = a + (-B + np.sqrt(radical)) / (3 * A)
# Histogram and mean of the fitted amplitudes.
# Use `pl` (how matplotlib.pyplot is imported at the top of this file);
# `plt` only existed via the %pylab IPython magic and would raise a
# NameError when this runs as a plain script.
pl.hist(ampArray)
print(np.mean(ampArray))
1.02350504175