import numpy as np
import matplotlib.pyplot as plt
import time
from TVL2 import *
from TVL2_sampling import *
In the following, we wish to draw samples from the distribution $$ \pi_{H}(x) \propto e^{-H(x)},$$ assuming that $\int e^{-H(x)}\,dx < +\infty$. For instance, if we want to know what images with a TV prior look like, we can choose $H(x) = \lambda \mathrm{TV}(x) + \epsilon \|x\|^2$ for a very small value of $\epsilon$ (the term $\epsilon \|x\|^2$ ensures that $\int e^{-H(x)}\,dx < +\infty$).
In order to draw from $ \pi_{H}$, we will use a Metropolis algorithm with pixel-wise random-walk proposals, inspired by the samplers used in statistical physics to draw from Gibbs distributions.
We will draw $n_r\times n_c$ images and we call $\Gamma$ the discrete grid $\{1,\dots,n_r\}\times \{1,\dots,n_c\}$.
# Uniform random image in [0,1), used below as the initial state of the Markov chains
noise = np.random.rand(50,50)
def TV1(u):
    """Anisotropic total variation of the image u with periodic boundary conditions.

    Sums |u(i+1,j) - u(i,j)| + |u(i,j+1) - u(i,j)| over the whole grid,
    using np.roll so the differences wrap around at the borders.

    Parameters
    ----------
    u : ndarray
        2D image (any shape; periodic differences are taken along both axes).

    Returns
    -------
    float
        Value of the discrete anisotropic TV.
    """
    # np.sum already reduces over all axes; no need for a nested sum
    return np.sum(np.abs(np.roll(u, 1, axis=0) - u) + np.abs(np.roll(u, 1, axis=1) - u))
Npas = int(1e4)      # number of Metropolis steps per chain
alpha = 0.1          # amplitude of the pixel-wise random-walk proposal
beta = [1, 10, 100]  # inverse temperatures: larger beta -> flatter (lower-TV) samples
t0 = time.time()
out = []
for b in beta:
    x = np.copy(noise)  # each chain starts from the same uniform-noise image
    # last argument 1e-12 is the epsilon of the quadratic term making the
    # density integrable (see the discussion above) — TODO confirm against
    # the metropolis_TV1 signature in TVL2_sampling
    out.append(metropolis_TV1(x, alpha, Npas, b, 1e-12))
t1 = time.time()
print(t1 - t0)  # elapsed wall-clock time for the three chains
9.2619309425354
# Display the three TV samples side by side, one panel per inverse temperature.
fig, axes = plt.subplots(nrows=1, ncols=3, figsize=(12, 4))
# beta = 1 sample: let imshow autoscale its gray levels
axes[0].imshow(out[0], cmap='gray')
axes[0].set_title('beta = 1 ')
# beta = 10 and beta = 100 samples: clamp the gray levels to [0, 1]
for k, b in zip((1, 2), (10, 100)):
    axes[k].imshow(out[k], cmap='gray', vmin=0, vmax=1)
    axes[k].set_title(f'beta = {b}')
Text(0.5, 1.0, 'beta = 100')
# To write the result on your disk
#import matplotlib
#matplotlib.image.imsave('sample_TV_beta1.png', out[0], cmap='gray')
#matplotlib.image.imsave('sample_TV_beta10.png', out[1], vmin =0,vmax = 1,cmap='gray')
#matplotlib.image.imsave('sample_TV_beta100.png', out[2], vmin =0,vmax = 1,cmap='gray')
Now, we wish to sample from the distribution $$\pi(u) \propto e^{-\left(\lambda \mathrm{TV}(u)+\|u-u_0\|^2/\sigma^2\right)}$$
We can use exactly the same approach as before.
def TVL2(x, ub, sigma, lambd):
    """TV-L2 (ROF-type) energy of the image x given the noisy observation ub.

    Computes ||x - ub||^2 / (2 sigma^2) + lambd * TV1(x), i.e. a quadratic
    data-fidelity term plus an anisotropic total-variation regularization.

    Parameters
    ----------
    x : ndarray
        Candidate image.
    ub : ndarray
        Noisy observed image (same shape as x).
    sigma : float
        Noise standard deviation (scales the data term).
    lambd : float
        TV regularization weight.

    Returns
    -------
    float
        Value of the energy.
    """
    # np.sum already reduces over all axes; no need for a nested sum
    return np.sum(0.5 * (x - ub) ** 2) / sigma**2 + lambd * TV1(x)
lambd = 20 # TV regularization parameter
sigma = 0.05 # noise standard deviation
# image creation: crop a small grayscale patch from the test image
n = 100 # start with small images for your experimentations
i = 100 # top-left corner of the crop
u = plt.imread('../im/simpson512.png')
u = u[:,:,1] # keep the green channel only
u = u[i:i+n,i:i+n] # n x n crop
nr,nc = u.shape
# add white Gaussian noise of standard deviation sigma
ub = u + sigma*np.random.randn(nr,nc)
noise = np.random.rand(nr,nc) # initial state for the Markov chain
# TV-MAP: deterministic maximum-a-posteriori denoising, used as a baseline
u_tvmap = chambolle_pock_prox_TV1(ub,sigma**2*lambd,100)
# parameters for the MCMC
Npas = int(3e4) # number of Metropolis steps (reduce to <= 1e4 for quick experiments)
alpha = 0.1 # amplitude of the pixel-wise random-walk proposal
t0 = time.time()
# Metropolis algorithm targeting pi(u) ∝ exp(-(lambd*TV(u) + ||u-ub||^2/sigma^2));
# returns the final state, the running mean (posterior expectation estimate)
# and the pixel-wise standard deviation
x = np.copy(noise) # initialization
x,xmean,stdx = metropolis_TVL2(x,alpha,Npas,ub,sigma,lambd)
t1 = time.time()
print(t1-t0) # elapsed wall-clock time in seconds
# Compare the noisy input, the TV-MAP estimate, and the two MCMC statistics.
fig, axes = plt.subplots(nrows=1, ncols=4, figsize=(20, 5))
panels = [
    (ub, 'Noisy image'),
    (u_tvmap, 'TV MAP'),
    (xmean, 'Conditional expectation'),
    (stdx, 'Standard deviation'),
]
for ax, (img, title) in zip(axes, panels):
    ax.imshow(img, cmap='gray')
    ax.set_title(title)
fig.tight_layout()
28.64272689819336
In the following, we apply the same approach as above for an interpolation problem. In other terms, we wish to sample from the distribution $$\pi(u) \propto e^{-\left(\lambda \mathrm{TV}(u)+\|Au-u_0\|^2/\sigma^2\right)}$$ where $A$ is a masking operator (diagonal matrix with $0$ and $1$ on the diagonal). We can use exactly the same approach as before.
lambd = 20 # TV regularization parameter
sigma = 0.01 # noise standard deviation
# image creation: crop a small grayscale patch from the test image
n = 100 # start with small images for your experimentations
i = 100 # top-left corner of the crop
u = plt.imread('../im/simpson512.png')
u = u[i:i+n,i:i+n,1] # n x n crop of the green channel
nr,nc = u.shape
# add mask and noise: each pixel is kept with probability 0.5,
# then white Gaussian noise of standard deviation sigma is added everywhere
mask = np.random.rand(nr,nc)<0.5
ub = u*mask + sigma*np.random.randn(nr,nc)
# show the clean image next to its masked/noisy observation
fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(15,7))
axes[0].imshow(u,cmap='gray')
axes[0].set_title('u')
axes[1].imshow(ub,cmap='gray')
axes[1].set_title('u masked')
fig.tight_layout()
# init of the chain: Gaussian noise image (note: randn here, not rand as before)
noise = np.random.randn(nr,nc)
# TV-MAP baseline for the interpolation problem (isotropic TV2 variant here)
u_tvmap = chambolle_pock_interpolation_TV2(ub,mask,sigma**2*lambd,200)
# parameters for the MCMC
Npas = int(1e4) # number of Metropolis steps
alpha = 0.1 # amplitude of the pixel-wise random-walk proposal
t0 = time.time()
# Metropolis algorithm (beware, TV1 here): targets
# pi(u) ∝ exp(-(lambd*TV(u) + ||A u - ub||^2/sigma^2)) with A the mask;
# returns the final state, the running mean and the pixel-wise std
x = np.copy(noise) # initialization
x,xmean,stdx = metropolis_TVL2A(x,mask,alpha,Npas,ub,sigma,lambd)
t1 = time.time()
print(t1-t0) # elapsed wall-clock time in seconds
10.583667993545532
# 2x2 grid: observation and TV-MAP on top, MCMC statistics below.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(20,20))
for ax, img, title in ((axes[0, 0], ub, 'ub'),
                       (axes[0, 1], u_tvmap, 'TV MAP'),
                       (axes[1, 1], xmean, 'conditional expectation'),
                       (axes[1, 0], stdx, 'std')):
    ax.imshow(img, cmap='gray')
    ax.set_title(title)
fig.tight_layout()