#!/usr/bin/env python
# coding: utf-8

# # Entropy maximization
#
# A derivative work by Judson Wilson, 6/2/2014.
# Adapted from the CVX example of the same name, by Joëlle Skaf, 4/24/2008.
#
# ## Introduction
#
# Consider the linearly constrained entropy maximization problem:
# $$\begin{array}{ll}
#     \mbox{maximize}   & -\sum_{i=1}^n x_i \log(x_i) \\
#     \mbox{subject to} & Ax = b \\
#                       & Fx \preceq g,
# \end{array}$$
# where the variable is $x \in \mathbf{R}^{n}$ and the problem data are
# $A \in \mathbf{R}^{m \times n}$, $b \in \mathbf{R}^{m}$,
# $F \in \mathbf{R}^{p \times n}$, and $g \in \mathbf{R}^{p}$.
#
# This problem can be formulated in CVXPY using the `entr` atom, which
# represents the elementwise function $-x_i \log(x_i)$.
#
# ## Generate problem data

# In[1]:


import cvxpy as cp
import numpy as np

# Make random input repeatable.
np.random.seed(0)

# Matrix size parameters.
n = 20
m = 10
p = 5

# Generate random problem data.
tmp = np.random.rand(n)
A = np.random.randn(m, n)
b = A.dot(tmp)
F = np.random.randn(p, n)
g = F.dot(tmp) + np.random.rand(p)


# ## Formulate and solve problem

# In[2]:


# Entropy maximization.
x = cp.Variable(shape=n)
obj = cp.Maximize(cp.sum(cp.entr(x)))
constraints = [A @ x == b,
               F @ x <= g]
prob = cp.Problem(obj, constraints)
prob.solve(solver=cp.ECOS, verbose=True)

# Print result.
print("\nThe optimal value is:", prob.value)
print("\nThe optimal solution is:")
print(x.value)
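
# ## Check the solution
#
# The following cell is a small sanity check added to this writeup (it is not
# part of the original CVX example): it verifies the recovered point against
# the constraints and recomputes the entropy objective directly with NumPy,
# assuming the solve above succeeded and `x.value` is populated.

# In[3]:


# Feasibility of the computed solution, up to solver tolerance.
x_opt = x.value
print("Equality residual ||A x - b||:", np.linalg.norm(A @ x_opt - b))
print("Max inequality violation (should be <= 0):", np.max(F @ x_opt - g))

# Recompute the entropy objective directly; it should match prob.value.
entropy = -np.sum(x_opt * np.log(x_opt))
print("Entropy recomputed with NumPy:", entropy)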