# Tutorial script demonstrating ForneyLab.jl message passing:
#   1) Sum-product inference of p(x|y1,y2) on a small Gaussian factor graph,
#      checked against the closed-form product of Gaussians.
#   2) Elementary forward/backward messages through addition and gain nodes.
#   3) Bayesian linear (polynomial-feature) regression via message passing.
#   4) Kalman filtering of a cart's position from noisy measurements.
# NOTE(review): the `eval(Meta.parse(sumProductAlgorithm(...)))` pattern compiles a
# generated `step_<name>!` function into the current module — this is the documented
# ForneyLab workflow of its era, not data-driven eval.

using ForneyLab

# Data
y1_hat = 1.0
y2_hat = 2.0

# Construct the factor graph
# (FactorGraph() sets the implicit "current graph" that subsequent @RV calls build into.)
fg = FactorGraph()
@RV x ~ GaussianMeanVariance(constant(0.0), constant(4.0), id=:x) # Node p(x)
@RV y1 ~ GaussianMeanVariance(x, constant(1.0)) # Node p(y1|x)
@RV y2 ~ GaussianMeanVariance(x, constant(2.0)) # Node p(y2|x)
Clamp(y1, y1_hat) # Terminal (clamp) node for y1
Clamp(y2, y2_hat) # Terminal (clamp) node for y2
# draw(fg) # draw the constructed factor graph

# Perform sum-product message passing
eval(Meta.parse(sumProductAlgorithm(x, name="_algo1"))) # Automatically derives a message passing schedule
x_marginal = step_algo1!(Dict())[:x] # Execute algorithm and collect marginal distribution of x
println("Sum-product message passing result: p(x|y1,y2) = 𝒩($(mean(x_marginal)),$(var(x_marginal)))")

# Calculate mean and variance of p(x|y1,y2) manually by multiplying 3 Gaussians (see lesson 4 for details)
# Precision-weighted combination: variances 4 (prior), 1 (y1 likelihood), 2 (y2 likelihood).
v = 1 / (1/4 + 1/1 + 1/2)
m = v * (0/4 + y1_hat/1.0 + y2_hat/2.0)
println("Manual result: p(x|y1,y2) = 𝒩($(m), $(v))")

# Forward message towards Z  (z = x + y with Gaussian x and y)
fg = FactorGraph()
@RV x ~ GaussianMeanVariance(constant(1.0), constant(1.0), id=:x)
@RV y ~ GaussianMeanVariance(constant(2.0), constant(1.0), id=:y)
@RV z = x + y; z.id = :z
eval(Meta.parse(sumProductAlgorithm(z, name="_z_fwd")))
msg_forward_Z = step_z_fwd!(Dict())[:z]
print("Forward message on Z: $(msg_forward_Z)")

# Backward message towards X  (x unobserved; z = x + y is clamped via a Gaussian on z)
fg = FactorGraph()
@RV x = Variable(id=:x)
@RV y ~ GaussianMeanVariance(constant(2.0), constant(1.0), id=:y)
@RV z = x + y
GaussianMeanVariance(z, constant(3.0), constant(1.0), id=:z)
eval(Meta.parse(sumProductAlgorithm(x, name="_x_bwd")))
msg_backward_X = step_x_bwd!(Dict())[:x]
print("Backward message on X: $(msg_backward_X)")

# Forward message towards Y  (y = 4x, a gain/multiplication node)
fg = FactorGraph()
@RV x ~ GaussianMeanVariance(constant(1.0), constant(1.0), id=:x)
@RV y = constant(4.0) * x; y.id = :y
eval(Meta.parse(sumProductAlgorithm(y, name="_y_fwd")))
msg_forward_Y = step_y_fwd!(Dict())[:y]
print("Forward message on Y: 
$(msg_forward_Y)")

# Backward message towards X through the gain node
fg = FactorGraph()
x = Variable(id=:x)
@RV y = constant(4.0) * x
GaussianMeanVariance(y, constant(2.0), constant(1.0))
eval(Meta.parse(sumProductAlgorithm(x, name="_x_fwd2")))
msg_backward_X = step_x_fwd2!(Dict())[:x]
print("Backward message on X: $(msg_backward_X)")

# ---------------------------------------------------------------------------
# Bayesian linear regression with polynomial features [1, z, z^2]
# ---------------------------------------------------------------------------
using PyPlot, ForneyLab, LinearAlgebra

# Parameters
Σ = 1e5 * Diagonal(I,3) # Covariance matrix of prior on w (broad, nearly uninformative)
σ2 = 2.0 # Noise variance

# Generate data set
w = [1.0; 2.0; 0.25] # true regression weights used to synthesize observations
N = 30
z = 10.0*rand(N) # NOTE(review): unseeded RNG — results differ run to run
x_train = [[1.0; z; z^2] for z in z] # Feature vector x = [1.0; z; z^2]
f(x) = (w'*x)[1]
y_train = map(f, x_train) + sqrt(σ2)*randn(N) # y[i] = w' * x[i] + ϵ
scatter(z, y_train); xlabel(L"z"); ylabel(L"f([1.0, z, z^2]) + \epsilon")

# Build factorgraph
# One DotProduct + Gaussian observation node per data point, all sharing weight vector w.
fg = FactorGraph()
@RV w ~ GaussianMeanVariance(constant(zeros(3)), constant(Σ, id=:Σ), id=:w) # p(w)
for t=1:N
    x_t = Variable(id=:x_*t)
    d_t = Variable(id=:d_*t) # d=w'*x
    DotProduct(d_t, x_t, w) # p(f|w,x)
    @RV y_t ~ GaussianMeanVariance(d_t, constant(σ2, id=:σ2_*t), id=:y_*t) # p(y|d)
    # Placeholders are bound to the :x / :y entries of the data Dict at step! time.
    placeholder(x_t, :x, index=t, dims=(3,))
    placeholder(y_t, :y, index=t);
end

# Build and run message passing algorithm
eval(Meta.parse(sumProductAlgorithm(w)))
data = Dict(:x => x_train, :y => y_train)
w_posterior_dist = step!(data)[:w]

# Plot result: data plus 10 regression curves drawn from the weight posterior
println("Posterior distribution of w: $(w_posterior_dist)")
scatter(z, y_train); xlabel(L"z"); ylabel(L"f([1.0, z, z^2]) + \epsilon");
z_test = collect(0:0.2:12)
x_test = [[1.0; z; z^2] for z in z_test]
for sample=1:10
    w = ForneyLab.sample(w_posterior_dist) # one weight-vector draw from the posterior
    f_est(x) = (w'*x)[1]
    plot(z_test, map(f_est, x_test), "k-", alpha=0.3);
end

# ---------------------------------------------------------------------------
# Kalman filtering: tracking a cart from noisy position/velocity measurements
# ---------------------------------------------------------------------------
using LinearAlgebra, ForneyLab
include("scripts/cart_tracking_helpers.jl") # implements required factor nodes + helper functions

# Specify the model parameters
# State z = [position; velocity]; constant-acceleration input enters through b*u.
Δt = 1.0 # assume the time steps to be equal in size
A = [1.0 Δt; 0.0 1.0]
b = [0.5*Δt^2; Δt]
Σz = convert(Matrix,Diagonal([0.2*Δt; 0.1*Δt])) # process noise covariance
Σx = convert(Matrix,Diagonal([1.0; 2.0])) # observation noise covariance;

# Generate noisy observations
# NOTE(review): generateNoisyMeasurements comes from cart_tracking_helpers.jl (not
# shown here); assumed to simulate the linear-Gaussian dynamics above — confirm there.
n = 10 # perform 10 timesteps
z_start = [10.0; 2.0] # initial state
u = 0.2 * ones(n) # constant input u
noisy_x = generateNoisyMeasurements(z_start, u, A, b, Σz, Σx);

# One-timestep factor graph; the same graph is re-used each step with the previous
# posterior of z fed back in as the prior (via the z_prev_m / z_prev_v placeholders).
fg = FactorGraph()
z_prev_m = Variable(id=:z_prev_m); placeholder(z_prev_m, :z_prev_m, dims=(2,))
z_prev_v = Variable(id=:z_prev_v); placeholder(z_prev_v, :z_prev_v, dims=(2,2))
bu = Variable(id=:bu); placeholder(bu, :bu, dims=(2,))
@RV z_prev ~ GaussianMeanVariance(z_prev_m, z_prev_v, id=:z_prev) # p(z_prev)
@RV noise_z ~ GaussianMeanVariance(constant(zeros(2), id=:noise_z_m), constant(Σz, id=:noise_z_v)) # process noise
@RV z = constant(A, id=:A) * z_prev + bu + noise_z; z.id = :z # p(z|z_prev) (state transition model)
@RV x ~ GaussianMeanVariance(z, constant(Σx, id=:Σx)) # p(x|z) (observation model)
placeholder(x, :x, dims=(2,));
ForneyLab.draw(fg)

include("scripts/cart_tracking_helpers.jl")
eval(Meta.parse(sumProductAlgorithm(z))) # build message passing algorithm
marginals = Dict()
messages = Array{Message}(undef,6) # filled in by step!; 6 = number of scheduled messages
z_prev_m_0 = zeros(2) # initial prior mean
z_prev_v_0 = 1e8*Diagonal(I,2) # initial prior covariance (very broad)
for t=1:n
    data = Dict(:x => noisy_x[t], :bu => b*u[t],:z_prev_m => z_prev_m_0, :z_prev_v => z_prev_v_0)
    step!(data, marginals, messages) # perform msg passing (single timestep)
    # Posterior of z becomes prior of z in the next timestep:
    # ForneyLab.ensureParameters!(marginals[:z], (:m, :v))
    z_prev_m_0 = ForneyLab.unsafeMean(marginals[:z])
    z_prev_v_0 = ForneyLab.unsafeCov(marginals[:z])
end
println(messages[6].dist)

# Collect prediction p(z[n]|z[n-1]), measurement p(z[n]|x[n]), corrected prediction p(z[n]|z[n-1],x[n])
# NOTE(review): indices 5 and 6 are hard-coded against the generated schedule and will
# silently point at the wrong messages if the schedule changes — re-inspect after any
# ForneyLab upgrade or graph edit.
prediction = messages[5].dist # the message index is found by manual inspection of the schedule
measurement = messages[6].dist
corr_prediction = convert(ProbabilityDistribution{Multivariate, GaussianMeanVariance}, marginals[:z])

# Make a fancy plot of the prediction, noisy measurement, and corrected prediction after n timesteps
plotCartPrediction(prediction, measurement, corr_prediction);

# Apply the course's notebook stylesheet (notebook-display side effect only).
open("../../styles/aipstyle.html") do f
    display("text/html", read(f, String))
end