versioninfo()

using Pkg
Pkg.activate("../..")
Pkg.status()

using Random, LinearAlgebra, SparseArrays

Random.seed!(123) # seed for reproducibility
n, p = 100, 50
X = rand(n, p)
# normalize each row of X to sum to one
lmul!(Diagonal(1 ./ vec(sum(X, dims=2))), X)
β = sprandn(p, 0.1) # sparse vector with about 10% non-zero entries
y = X * β + randn(n);

using Convex

β̂cls = Variable(size(X, 2))
problem = minimize(0.5sumsquares(y - X * β̂cls)) # objective
problem.constraints += sum(β̂cls) == 0; # sum-to-zero constraint

# Use Mosek solver
using Mosek, MosekTools

opt = () -> Mosek.Optimizer(LOG=1) # anonymous function returning an optimizer
@time solve!(problem, opt)
# Check the status, optimal value, and minimizer of the problem
problem.status, problem.optval, β̂cls.value

# Use Gurobi solver
ENV["GRB_LICENSE_FILE"] = "/Users/jhwon/gurobi/gurobi.lic" # set as YOUR path to license file
using Gurobi
const GRB_ENV = Gurobi.Env()

opt = () -> Gurobi.Optimizer(GRB_ENV) # anonymous function
@time solve!(problem, opt)
# Check the status, optimal value, and minimizer of the problem
problem.status, problem.optval, β̂cls.value

# Use SCS solver
using SCS

opt = () -> SCS.Optimizer(verbose=1) # anonymous function
@time solve!(problem, opt)
# Check the status, optimal value, and minimizer of the problem
problem.status, problem.optval, β̂cls.value

# Use COSMO solver
using COSMO

opt = () -> COSMO.Optimizer(max_iter=5000) # anonymous function
@time solve!(problem, opt)
# Check the status, optimal value, and minimizer of the problem
problem.status, problem.optval, β̂cls.value

# Pick a solver for the path computation below (alternatives commented out)
## Use Mosek solver
#opt = () -> Mosek.Optimizer(LOG=0)
## Use Gurobi solver
#opt = () -> Gurobi.Optimizer(GRB_ENV)
# Use SCS solver
opt = () -> SCS.Optimizer(verbose=0)
## Use COSMO solver
#opt = () -> COSMO.Optimizer(max_iter=10000, verbose=false)

# solve at a grid of λ values
λgrid = 0:0.01:0.35
β̂path = zeros(length(λgrid), size(X, 2)) # each row is β̂ at a λ
β̂classo = Variable(size(X, 2))
@time for i in 1:length(λgrid)
    λ = λgrid[i]
    # lasso objective
    problem = minimize(0.5sumsquares(y - X * β̂classo) + λ * sum(abs, β̂classo))
    # sum-to-zero constraint
    problem.constraints += sum(β̂classo) == 0
    solve!(problem, opt)
    β̂path[i, :] = β̂classo.value
end

using Plots; gr()

p = plot(collect(λgrid), β̂path, legend=:none)
xlabel!(p, "lambda")
ylabel!(p, "beta_hat")
title!(p, "Zero-Sum Lasso")

# Pick a solver (alternatives commented out)
## Use Mosek solver
#opt = () -> Mosek.Optimizer(LOG=0)
## Use Gurobi solver
#opt = () -> Gurobi.Optimizer(GRB_ENV)
# Use SCS solver
opt = () -> SCS.Optimizer(verbose=0)
## Use COSMO solver
#opt = () -> COSMO.Optimizer(max_iter=5000, verbose=false)

# solve at a grid of λ values
λgrid = 0.1:0.005:0.5
β̂pathgrp = zeros(length(λgrid), size(X, 2)) # each row is β̂ at a λ
β̂classo = Variable(size(X, 2))
@time for i in 1:length(λgrid)
    λ = λgrid[i]
    # loss
    obj = 0.5sumsquares(y - X * β̂classo)
    # group lasso penalty term (groups of 10 consecutive coefficients)
    for j in 1:(size(X, 2) ÷ 10) # integer division, not /, so indices stay integers
        βj = β̂classo[(10(j-1)+1):10j]
        obj = obj + λ * norm(βj)
    end
    problem = minimize(obj)
    # sum-to-zero constraint
    problem.constraints += sum(β̂classo) == 0
    solve!(problem, opt)
    β̂pathgrp[i, :] = β̂classo.value
end

p2 = plot(collect(λgrid), β̂pathgrp, legend=:none)
xlabel!(p2, "lambda")
ylabel!(p2, "beta_hat")
title!(p2, "Zero-Sum Group Lasso")
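# A quick sanity check on the two solution paths above (a minimal sketch,
# assuming the path code has just run; the 1e-4 threshold and the name
# nnzpath are arbitrary choices, not part of the original analysis). Each
# fitted vector should satisfy the sum-to-zero constraint up to solver
# accuracy, and the number of numerically nonzero coefficients should tend
# to shrink as λ grows.
maximum(abs, sum(β̂path, dims=2)), maximum(abs, sum(β̂pathgrp, dims=2)) # both ≈ 0
nnzpath = [count(b -> abs(b) > 1e-4, β̂path[i, :]) for i in 1:size(β̂path, 1)]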
using Images

lena = load("lena128missing.png")

# convert to a real matrix; missing pixels are coded as 0
Y = Float64.(lena)

# Use COSMO solver
using COSMO
opt = () -> COSMO.Optimizer()
## Use SCS solver
#using SCS
#opt = () -> SCS.Optimizer()
## Use Mosek solver
#using Mosek
#opt = () -> Mosek.Optimizer()

# Linear indices of the observed entries
obsidx = findall(Y[:] .≠ 0.0)
# Create the optimization variable
X = Convex.Variable(size(Y))
# Set up the optimization problem
problem = minimize(nuclearnorm(X))
problem.constraints += X[obsidx] == Y[obsidx]
# Solve the problem by calling solve!
@time solve!(problem, opt)

colorview(Gray, X.value)

using Random, Statistics, SpecialFunctions
Random.seed!(280)

# log-likelihood of an iid sample from Gamma(shape α, rate β)
function gamma_logpdf(x::Vector, α::Real, β::Real)
    m = length(x)
    avg = mean(x)
    logavg = sum(log, x) / m
    m * (-loggamma(α) + α * log(β) + (α - 1) * logavg - β * avg)
end

x = rand(5)
gamma_logpdf(x, 1.0, 1.0)

using ForwardDiff
ForwardDiff.gradient(θ -> gamma_logpdf(x, θ...), [1.0; 1.0])
ForwardDiff.hessian(θ -> gamma_logpdf(x, θ...), [1.0; 1.0])

# generate data
using Distributions, Random
Random.seed!(280)
(n, p) = (1000, 2)
(α, β) = 5.0 * rand(p)
x = rand(Gamma(α, β), n)
println("True parameter values:")
println("α = ", α, ", β = ", β)

using JuMP, Ipopt, NLopt

#m = Model(with_optimizer(Ipopt.Optimizer, print_level=3))
m = Model(
    optimizer_with_attributes(
        NLopt.Optimizer, "algorithm" => :LD_MMA
    )
)

myf(a, b) = gamma_logpdf(x, a, b)
JuMP.register(m, :myf, 2, myf, autodiff=true)
@variable(m, α >= 1e-8)
@variable(m, β >= 1e-8)
@NLobjective(m, Max, myf(α, β))
print(m)

JuMP.optimize!(m)

println("MLE (JuMP):")
println("α = ", JuMP.value(α), ", β = ", JuMP.value(β))
println("Objective value: ", JuMP.objective_value(m))
# convert the rate β to the scale θ = 1 / β used by Distributions.jl
println("α = ", JuMP.value(α), ", θ = ", 1 / JuMP.value(β))
println("MLE (Distributions package):")
println(fit_mle(Gamma, x))
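# As a cross-check on the JuMP fit above, here is a minimal Newton-iteration
# sketch on the same log-likelihood, reusing the ForwardDiff derivatives
# demonstrated earlier. The helper name gamma_mle_newton, the starting point
# [1.0, 1.0], and the stopping rule are assumptions for illustration only;
# the iteration has no step-size safeguard, so a poor start could leave the
# positive orthant.
using ForwardDiff, LinearAlgebra

function gamma_mle_newton(x::Vector; θ0 = [1.0, 1.0], maxiter = 100, tol = 1e-8)
    ℓ(θ) = gamma_logpdf(x, θ...)
    θ = copy(θ0)
    for _ in 1:maxiter
        g = ForwardDiff.gradient(ℓ, θ)
        H = ForwardDiff.hessian(ℓ, θ) # negative definite near the maximum
        Δ = H \ g                     # Newton step
        θ -= Δ
        norm(Δ) < tol && break
    end
    θ
end

α̂, β̂ = gamma_mle_newton(x)
println("MLE (Newton): α = ", α̂, ", β = ", β̂, ", θ = ", 1 / β̂)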