# Load functions from packages
using GaussianProcesses, Plots
using Distributions: Normal, TDist
using Random
using Statistics

# Simulate the data
Random.seed!(112233)
n = 20
X = range(-3, stop=3, length=n);
sigma = 1.0
Y = X + sigma*rand(TDist(3), n);

# Plot the observations
pyplot()
scatter(X, Y; fmt=:png, leg=false)

# Build the models
gpe = GPE(X, vec(Y), MeanZero(), Matern(3/2, 0.0, 0.0), 0.5)    # Exact GP assuming Gaussian noise
l = StuTLik(3, 0.1)
gpa = GPA(X, vec(Y), MeanZero(), Matern(3/2, 0.0, 0.0), l)      # Approximate GP with Student-t likelihood

# Fit the exact GP by optimising the marginal likelihood
optimize!(gpe)

# Set priors on the likelihood and kernel hyperparameters, then sample them with MCMC
set_priors!(gpa.lik, [Normal(-2.0, 4.0)])
set_priors!(gpa.kernel, [Normal(-2.0, 4.0), Normal(-2.0, 4.0)])
samples = mcmc(gpa; nIter=10000, burn=1000, thin=2);

# Plot posterior samples
xtest = range(minimum(gpa.x), stop=maximum(gpa.x), length=50);

# Set the parameters to the posterior values and sample the random function
fsamples = [];
for i in 1:size(samples, 2)
    set_params!(gpa, samples[:, i])
    update_target!(gpa)
    push!(fsamples, rand(gpa, xtest))
end

# Predict
p1 = plot(gpe, leg=false, title="Exact GP")    # Exact GP (assuming Gaussian noise)
fmat = reduce(hcat, fsamples)                  # ntest x nsamples matrix of posterior draws
sd = vec(std(fmat, dims=2))                    # pointwise standard deviation across draws
p2 = plot(xtest, vec(mean(fmat, dims=2)), ribbon=2*sd, leg=false, title="Monte Carlo GP")    # GP Monte Carlo with Student-t noise
scatter!(X, Y)
plot(p1, p2; fmt=:png)
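
# For comparison, the exact GP's predictive band can also be drawn from its analytic
# posterior rather than from Monte Carlo draws. A minimal sketch, assuming the `gpe`
# model fitted above: predict_y returns the predictive mean and variance, including
# the learned observation noise.
mu_e, sig2_e = predict_y(gpe, collect(xtest))
plot(xtest, mu_e, ribbon=2*sqrt.(sig2_e), leg=false, title="Exact GP predictive band")
scatter!(X, Y)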