# Gaussian process regression demo with AugmentedGaussianProcesses.jl:
# 1) compare sparse approximations (SVGP) of increasing size against the full GP;
# 2) compare non-Gaussian likelihoods (via VGP) against the Gaussian baseline.
using AugmentedGaussianProcesses
const AGP = AugmentedGaussianProcesses
using Distributions
using Plots

# Generate N noisy observations of a scaled sinc over (-20, 20).
N = 1000
X = reshape((sort(rand(N)) .- 0.5) * 40.0, N, 1)
σ = 0.01

# Noise-free latent function the GP should recover.
function latent(x)
    return 5.0 * sinc.(x)
end
Y = vec(latent(X) + σ * randn(N));

scatter(X, Y; lab="")

Ms = [4, 8, 16, 32, 64]; # Numbers of inducing points to compare
models = Vector{AbstractGP}(undef, length(Ms) + 1);
kernel = SqExponentialKernel();# + PeriodicKernel()
for (index, num_inducing) in enumerate(Ms)
    @info "Training with $(num_inducing) points"
    m = SVGP(
        X,
        Y, # First arguments are the input and output
        kernel, # Kernel
        GaussianLikelihood(σ), # Likelihood used
        AnalyticVI(), # Inference used to solve the problem
        num_inducing; # Number of inducing points used
        optimiser=false, # Keep kernel parameters fixed
        Zoptimiser=false, # Keep inducing points locations fixed
    )
    @time train!(m, 100) # Train the model for 100 iterations
    models[index] = m # Save the model in the array
end

@info "Training with full model"
mfull = GP(
    X,
    Y,
    kernel;
    noise=σ,
    opt_noise=false, # Keep the noise value fixed
    optimiser=false, # Keep kernel parameters fixed
)
@time train!(mfull, 5);
models[end] = mfull;

# Predict the mean and variance of a model on a regular grid over the input domain.
# Returns (mean, variance, grid). The (-20, 20) bounds match the data range above.
function compute_grid(model, n_grid=50)
    mins = -20
    maxs = 20
    x_grid = range(mins, maxs; length=n_grid) # Create a grid
    y_grid, sig_y_grid = proba_y(model, reshape(x_grid, :, 1)) # Predict the mean and variance on the grid
    return y_grid, sig_y_grid, x_grid
end;

# Scatter plot of the raw training data.
function plotdata(X, Y)
    return Plots.scatter(X, Y; alpha=0.33, msw=0.0, lab="", size=(300, 500))
end

# Plot a model's predictive mean with a ±2-std ribbon over the training data;
# for sparse (SVGP) models, also mark the inducing-point locations on the x-axis.
function plot_model(model, X, Y, title=nothing)
    n_grid = 100
    y_grid, sig_y_grid, x_grid = compute_grid(model, n_grid)
    title = if isnothing(title)
        (model isa SVGP ? "M = $(dim(model[1]))" : "full")
    else
        title
    end
    p = plotdata(X, Y)
    Plots.plot!(
        p,
        x_grid,
        y_grid;
        ribbon=2 * sqrt.(sig_y_grid), # Plot 2 std deviations
        title=title,
        color="red",
        lab="",
        linewidth=3.0,
    )
    if model isa SVGP # Plot the inducing points as well
        Plots.plot!(
            p,
            vec(model.f[1].Z),
            zeros(dim(model.f[1]));
            msize=2.0,
            color="black",
            t=:scatter,
            lab="",
        )
    end
    return p
end;

Plots.plot(
    plot_model.(models, Ref(X), Ref(Y))...; layout=(1, length(models)), size=(1000, 200)
) # Plot all models and combine the plots

# Non-conjugate likelihoods: these need the (non-sparse) VGP model.
likelihoods = [
    StudentTLikelihood(3.0), LaplaceLikelihood(3.0), HeteroscedasticLikelihood(1.0)
]
ngmodels = Vector{AbstractGP}(undef, length(likelihoods) + 1)
for (i, l) in enumerate(likelihoods)
    @info "Training with the $(l)" # We need to use VGP
    m = VGP(
        X,
        Y, # First arguments are the input and output
        kernel, # Kernel
        l, # Likelihood used
        AnalyticVI(); # Inference used to solve the problem
        optimiser=false, # Keep kernel parameters fixed
    )
    @time train!(m, 10) # Train the model for 10 iterations
    ngmodels[i] = m # Save the model in the array
end
ngmodels[end] = models[end] # Add the Gaussian model

Plots.plot(
    plot_model.(
        ngmodels, Ref(X), Ref(Y), ["Student-T", "Laplace", "Heteroscedastic", "Gaussian"]
    )...;
    layout=(2, 2),
    size=(1000, 200),
) # Plot all models and combine the plots