%reset -f
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
# Generate the signal and measurements
# Compressed-sensing demo setup: a sparse ground-truth signal x is observed
# through an underdetermined random Gaussian matrix H (M < N measurements).
# NOTE(review): no RNG seed is set, so every run produces a different problem.
N = 128 # signal size
M = int(N/2) # number of measurements
x = np.random.randn(N, 1)*np.random.binomial(1,0.1,(N, 1)) # generate a sparse signal
H = (1/M)*np.random.randn(M, N) # generate a measurement matrix
y = np.dot(H, x) # measure the signal
# Visualize the sparse signal and its compressed measurements,
# each in its own figure.
for series, length, fmt, label in (
    (x, N, 'r', 'signal'),
    (y, M, 'g-', 'measurements'),
):
    plt.figure()
    plt.plot(range(length), series, fmt)
    plt.title(label)
    plt.xlim(1, length)
    plt.show()
# specify soft-thresholding function and energy function
def eta(x, lam):
    """Soft-thresholding operator (proximal map of the l1-norm).

    Parameters
    ----------
    x : np.ndarray
        Input array.
    lam : float
        Threshold: entries with ``|x| <= lam`` are set to zero and larger
        entries are shrunk toward zero by ``lam``.

    Returns
    -------
    np.ndarray
        The thresholded array.

    Notes
    -----
    The previous implementation wrote into ``x`` via boolean indexing and
    therefore mutated the caller's array as a side effect.  This version
    uses the equivalent closed form ``sign(x) * max(|x| - lam, 0)`` and
    leaves the input untouched; the returned values are identical.
    """
    return np.sign(x) * np.maximum(np.abs(x) - lam, 0.0)
def computeCost(x, y, H, lam):
    """Evaluate the LASSO objective ``||Hx - y||^2 + lam * ||x||_1``."""
    residual = np.dot(H, x) - y           # data misfit
    data = np.linalg.norm(residual) ** 2  # squared l2 data-fidelity term
    reg = np.abs(x).sum()                 # l1 sparsity penalty
    return data + lam * reg
# parameters of ISTA (Iterative Shrinkage-Thresholding Algorithm)
numIter = 1000 # number of iterations
# Step-size 1/||H||_2^2, i.e. the reciprocal of the spectral norm squared
# of the measurement matrix (standard ISTA step-size choice).
gamma = np.square(1/np.linalg.norm(H, 2)) # step-size
lam = 0.001 # regularization parameter
costs = np.zeros((numIter, 1)) # tracks cost evolution
# run ISTA iterations
xhat = np.zeros((N, 1)) # initialize ISTA at the zero vector
# loop variable renamed from 'iter' to 'k' — 'iter' shadows the builtin
for k in range(numIter):
    grad = np.dot(np.transpose(H), np.dot(H, xhat)-y) # gradient step on the data term
    xhat = eta(xhat - gamma*grad, gamma*lam) # proximal (shrinkage) step
    costs[k] = computeCost(xhat, y, H, lam) # track cost
# plot results
# show estimate: ground truth vs ISTA reconstruction
plt.figure()
plt.plot(range(N), x, '-r')
plt.plot(range(N), xhat, '-b')
plt.ylabel("y")
plt.xlabel("x")
plt.title("estimate")
plt.legend(("x", "xhat"))
plt.show()
# show cost evolution on log-log axes
plt.figure()
plt.loglog(range(numIter), costs)
plt.xlabel("iter")
plt.ylabel("cost")
# costs[-1, 0] extracts the scalar; %-formatting a size-1 array is deprecated.
plt.title("Final cost = %.2e" % costs[-1, 0])
plt.show()
# NOTE: the stray notebook cell output '<matplotlib.text.Text at 0x11270ec50>'
# that followed this block was removed — it is not valid Python.