#!/usr/bin/env python
# coding: utf-8

# # Image Filtering using Discrete Fourier Transform
# 
# ### Examples - Optics
# 
# By Jonas Tjemsland, Håkon Ånes, Andreas Krogen and Jon Andreas Støvneng
# 
#
# Edited by Thorvald Ballestad, Jenny Lunde, Sondre Duna Lundemo (2021)
# 
# Last edited: February 17th 2021
# 
# ___
# ## Introduction
# 
# In this notebook we will discuss the two-dimensional Fourier Transform (FT) and use it to filter images. The theory behind FTs is already discussed in the notebook *[Discrete Fourier Transform and Fast Fourier Transform](https://nbviewer.jupyter.org/url/www.numfys.net/media/notebooks/discrete_fourier_transform.ipynb)*, and one-dimensional FTs are used to filter sound files in the notebook *[Simple Sound Filtering using Discrete Fourier Transform](https://nbviewer.jupyter.org/url/www.numfys.net/media/notebooks/simple_sound_filtering.ipynb)*. If you want a deeper mathematical and physical understanding of the concepts used, we refer you to these notebooks or one of the links at the bottom of this notebook. Remember that we can interpret the Fourier Transform of the spatial domain (here the images) as the frequency domain, or frequency spectrum. For simplicity we will use grayscale images, but this can easily be generalised.

# In[1]:


import warnings

import matplotlib.pyplot as plt
import numpy as np
import scipy.fftpack as fft
from skimage import io

get_ipython().run_line_magic('matplotlib', 'inline')

# Taking the logarithm of zero in visualising the spectrum gives warnings,
# hence we disable them
warnings.filterwarnings("ignore")


# ## Fourier transforming an image
# 
# In order to simplify the code and avoid repetition, we create a function that visualises an image and its corresponding spectrum in the frequency domain. Note that we plot the logarithm of the absolute value, as we did in the aforementioned notebook on sound filtering.

# In[2]:


def visualise(image, spectrum, title="", title_img="", title_spec="", cmap="gray"):
    """Visualise an image and its corresponding frequency spectrum
    in one figure.

    Parameters:
        image:      array-like, as in plt.imshow(). Image.
        spectrum:   array-like, as in plt.imshow(). Frequency spectrum.
        title:      string. Figure title. Optional.
        title_img:  string. Image subtitle. Optional.
        title_spec: string. Spectrum subtitle. Optional.
        cmap:       `~matplotlib.colors.Colormap`. Color map. Optional.
    """
    plt.figure(figsize=(18, 5))
    plt.subplot(121)
    plt.imshow(image, cmap=cmap)
    plt.title(title_img)
    plt.subplot(122)
    plt.imshow(np.log(abs(spectrum)), cmap=cmap)
    plt.title(title_spec)
    plt.suptitle(title)
    plt.show()


# Furthermore, we create two functions for calculating the Fourier Transform and the inverse Fourier Transform of an image. Note that these functions also shift the quadrants, such that the low frequencies are collected towards the middle of the spectrum and the frequency increases with the distance from the center. This is in accordance with most tutorials online, but it is not strictly necessary. A quick check below verifies that the two functions are inverses of each other.

# In[3]:


def FFT(image):
    """Compute the 2D discrete Fast Fourier Transform of an image and shift
    the zero-frequency component to the center of the spectrum.

    Parameters:
        image: array-like, shape (m, n), can be complex.

    Returns:
        complex ndarray, shape (m, n). Frequency spectrum.
    """
    return np.fft.fftshift(np.fft.fft2(image))


def IFFT(spectrum):
    """Shift the zero-frequency component back to the edges of the spectrum
    and compute the inverse 2D discrete Fast Fourier Transform of an image.

    Parameters:
        spectrum: array-like, shape (m, n), can be complex.

    Returns:
        complex ndarray, shape (m, n). Spatial domain.
    """
    image = np.fft.ifft2(np.fft.ifftshift(spectrum))
    return image


# Let us take the FT of an image and plot the result.
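# Before turning to the image, here is a quick sanity check (a minimal sketch using a small random array) that `IFFT(FFT(x))` recovers `x` up to floating-point error:

# In[ ]:


# Round trip: forward transform followed by the inverse transform.
x = np.random.rand(8, 8)
print(np.allclose(IFFT(FFT(x)), x))  # expected: True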
# In[4]:


img = io.imread("https://www.numfys.net/media/notebooks/images/dog.jpg", as_gray=True)
# You may also play around with these images: (last tested March 22nd 2018)
# img = io.imread('https://upload.wikimedia.org/wikipedia/commons/5/50/Albert_Einstein_%28Nobel%29.png', True)
# img = io.imread('https://upload.wikimedia.org/wikipedia/en/4/42/Richard_Feynman_Nobel.jpg', True)
# img = io.imread('https://upload.wikimedia.org/wikipedia/commons/c/cf/Dirac_4.jpg', True)
# img = io.imread('https://upload.wikimedia.org/wikipedia/commons/7/73/Stephen_Hawking_in_Cambridge.jpg', True)

spec = FFT(img)
visualise(img, spec, title_img="Input image", title_spec="Spectrum")


# ## Filtering
# 
# There is an endless number of filters that can be applied to the frequency domain in order to filter the original image. We will implement highpass filters, which remove the low frequencies, and lowpass filters, which remove the high frequencies. In both cases an ideal filter and a Gaussian filter are used.
# 
# Assume that the image in question has $n$ horizontal pixels ($x$-direction) and $m$ vertical pixels ($y$-direction). Let $x=0,1,\ldots,n-1$ and $y=0,1,\ldots,m-1$ define a square lattice $x \times y$. Now denote the frequency spectrum by $\mathcal F$, the ideal filters by $I_\text{lowpass}$ and $I_\text{highpass}$ and the Gaussian filters by $G_\text{lowpass}$ and $G_\text{highpass}$, all of which are defined on the lattice $x \times y$. We define these filters such that a filtered spectrum is given by the *Hadamard product* (elementwise matrix product) $\mathcal F\circ I$ or $\mathcal F\circ G$.
# 
# $r=\sqrt{(x-n/2)^2+(y-m/2)^2}\geq0$ is the distance from the center of the spectrum. The filters we consider depend on a single parameter $\xi\in \mathbb{R}^+$. In this notation the ideal filters are given by
# 
# $$
# I_\text{lowpass}=\begin{cases}
# 1,& \text{for } r\leq\xi,\\
# 0,& \text{for } r>\xi,
# \end{cases}
# \quad \text{and} \quad
# I_\text{highpass}=\begin{cases}
# 1,& \text{for } r\geq\xi,\\
# 0,& \text{for } r<\xi,
# \end{cases}
# $$
# 
# and the Gaussian filters are given by
# 
# $$
# G_\text{lowpass}=e^{-\xi r^2}
# \quad \text{and} \quad
# G_\text{highpass}=1-e^{-\xi r^2}.
# $$

# In[5]:


def filter_spectrum(spectrum, filter_type=None, val=50):
    """Filter the spectrum of an image using one of the filters
    defined in the text above.

    Parameters:
        spectrum: array-like, shape (m, n), can be complex. Spectrum being filtered.
        filter_type: string. Filter name, one of
            {'lowpass', 'highpass', 'gaussian_highpass', 'gaussian_lowpass'}.
        val: float. Filter parameter.

    Returns:
        complex ndarray, shape (m, n). Filtered spectrum.
    """
    n, m = np.shape(spectrum)
    y, x = np.meshgrid(np.arange(1, m + 1), np.arange(1, n + 1))
    R2 = (x - n / 2) ** 2 + (y - m / 2) ** 2
    if filter_type == "lowpass":
        return spectrum * (R2 <= val ** 2)
    elif filter_type == "highpass":
        return spectrum * (R2 >= val ** 2)
    elif filter_type == "gaussian_highpass":
        return spectrum * (1 - np.exp(-val * R2))
    elif filter_type == "gaussian_lowpass":
        return spectrum * np.exp(-val * R2)
    elif filter_type is not None:
        raise ValueError("%s is not a valid filter!" % filter_type)
    return spectrum


# We now apply these filters to the frequency spectrum and take the inverse Fourier Transform to obtain the filtered images. Notice that the images filtered using the Gaussian filters are somewhat smoother than those filtered with the ideal filters.
# 
# 
# Play around with the code snippets!
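# As a quick consistency check (a small sketch reusing `spec` and `filter_spectrum` from above), note that the two Gaussian filters are exact complements, $G_\text{lowpass}+G_\text{highpass}=1$, so the two filtered spectra should sum back to the original spectrum.

# In[ ]:


# The Gaussian lowpass and highpass filters sum to one pointwise,
# so adding the two filtered spectra recovers the original spectrum.
low = filter_spectrum(spec, "gaussian_lowpass", 0.001)
high = filter_spectrum(spec, "gaussian_highpass", 0.001)
print(np.allclose(low + high, spec))  # expected: True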
# ### Lowpass

# In[6]:


filtered_spec = filter_spectrum(spec, "lowpass", 40)
filtered_img = np.real(IFFT(filtered_spec))
visualise(filtered_img, filtered_spec, title="Ideal lowpass filtering")


# In[7]:


filtered_spec = filter_spectrum(spec, "gaussian_lowpass", 0.001)
filtered_img = np.real(IFFT(filtered_spec))
visualise(filtered_img, filtered_spec, title="Gaussian lowpass filtering")


# ### Highpass

# In[8]:


filtered_spec = filter_spectrum(spec, "highpass", 10)
filtered_img = np.real(IFFT(filtered_spec))
visualise(filtered_img, filtered_spec, title="Ideal highpass filtering")


# In[9]:


filtered_spec = filter_spectrum(spec, "gaussian_highpass", 0.01)
filtered_img = np.real(IFFT(filtered_spec))
visualise(filtered_img, filtered_spec, title="Gaussian highpass filtering")


# ## Filtering out specific patterns and noise
# 
# The concepts introduced in this notebook can easily be applied to remove specific noise patterns from an image, in the same way as one-dimensional Fourier Transforms were used to filter sound files in the aforementioned notebook on sound filtering. In the two following examples we start from an image with an added noise pattern. The image is Fourier Transformed to the frequency domain, the anomalies are removed, and a filtered image is obtained by transforming back to the spatial domain.

# ### Example 1

# In[10]:


img = io.imread("https://www.numfys.net/media/notebooks/images/dog_noise1.jpg", True)
spec = FFT(img)
visualise(img, spec, title_img="Noisy image", title_spec="Unfiltered spectrum")


# In[11]:


# Create a filter which filters out the circular anomaly
n, m = np.shape(img)
y, x = np.meshgrid(np.arange(m), np.arange(n))
R2 = (x - n / 2) ** 2 + (y - m / 2) ** 2
filtered_spec = spec * (R2 <= 100 ** 2) + spec * (R2 >= 130 ** 2)
filtered_img = np.real(IFFT(filtered_spec))

# Visualise result
visualise(
    filtered_img,
    filtered_spec,
    title_img="Filtered image",
    title_spec="Filtered spectrum",
)


# ### Example 2

# In[12]:


img = io.imread("https://www.numfys.net/media/notebooks/images/dog_noise2.jpg", True)
spec = FFT(img)
visualise(img, spec, title_img="Noisy image", title_spec="Unfiltered spectrum")


# In[13]:


# Create a filter which filters out the anomaly on the axes (+)
n, m = np.shape(img)
y, x = np.meshgrid(np.arange(m), np.arange(n))
filtered_spec = spec * (x <= n / 2 - 1) + spec * (x >= n / 2 + 1)
filtered_spec = filtered_spec * (y <= m / 2 - 1) + filtered_spec * (y >= m / 2 + 1)
filtered_img = np.real(IFFT(filtered_spec))

# Visualise result
visualise(
    filtered_img,
    filtered_spec,
    title_img="Filtered image",
    title_spec="Filtered spectrum",
)


# # A class-based implementation
# We at NumFys recently wrote a guide on [object-oriented programming (OOP) in Python](https://nbviewer.jupyter.org/urls/www.numfys.net/media/notebooks/classes_in_Python.ipynb).
# OOP is a powerful paradigm which certainly has its place in numerical programming.
# In our notebook on [neural networks](https://nbviewer.jupyter.org/urls/www.numfys.net/media/notebooks/ML_from_scratch_tekst.ipynb) we exploited a class-based structure to keep the code base tidy.
# That notebook is, however, quite intricate and only demonstrates some of the features of OOP.
# We therefore revisit this old gem of a notebook to demonstrate the power of inheritance in OOP on a less complex problem.
# We hope that seeing the image filtering implemented both in a function-based paradigm and using a class-based approach will help you develop a mindset for OOP development.
# 
# We first create a class `Filter`, which will serve as our base class.
# No instances of this class are ever created;
# for those who are familiar with OOP in other languages, we treat this class as an abstract or virtual class.
# All of the filters implemented above have in common that they apply some transformation to the spectrum (the Fourier transform) of the image.
# We therefore create a class `SpectrumFilter`, which is a subclass of `Filter`, and implement each of the spectrum filters as a subclass of it.
# The reason for adding `SpectrumFilter` instead of simply using `Filter` is that, for demonstration purposes, we wish to add some filters towards the end that are not based on the spectrum.

# In[14]:


class Filter:
    """Abstract class for a filter. Subclasses must implement
    the `render` method."""

    def __init__(self, val=1):
        # Val is some parameter used by the filter.
        # Its specific meaning varies between filters.
        self.val = val

    def render(self, image):
        """Applies the filter to `image`."""
        return image

    def visualize(
        self, title="", title_img="", title_spec="", cmap="gray", figsize=(18, 5)
    ):
        assert hasattr(self, "image"), "The filter has not rendered any image!"
        if not hasattr(self, "spectrum"):
            self.spectrum = FFT(self.image)

        plt.figure(figsize=figsize)
        plt.subplot(121)
        plt.imshow(self.image, cmap=cmap, vmin=0, vmax=1)
        plt.title(title_img)
        plt.xticks([])
        plt.yticks([])
        plt.subplot(122)
        plt.imshow(np.log(abs(self.spectrum)), cmap=cmap)
        plt.title(title_spec)
        plt.xticks([])
        plt.yticks([])
        plt.suptitle(title)
        plt.show()

    def render_and_visualize(self, image, **kwargs):
        """Convenience function for rendering and visualizing."""
        self.render(image)
        self.visualize(**kwargs)
        return self.image

    def __call__(self, img):
        return self.render_and_visualize(img)


class SpectrumFilter(Filter):
    """Abstract class for a spectrum filter. A spectrum filter does some
    transformation to the spectrum (FT) of the image."""

    def spectrum_criterion(self, R2):
        pass

    def render(self, image):
        spectrum = FFT(image)
        n, m = np.shape(spectrum)
        y, x = np.meshgrid(np.arange(1, m + 1), np.arange(1, n + 1))
        R2 = (x - n / 2) ** 2 + (y - m / 2) ** 2
        spectrum *= self.spectrum_criterion(R2)
        # Store spectrum and image
        self.spectrum = spectrum
        self.image = np.real(IFFT(spectrum))
        # Rescale image to be within [0, 1]
        a, b = np.min(self.image), np.max(self.image)
        self.image = (self.image - a) / (b - a)
        return self.image


class HighpassSpectrumFilter(SpectrumFilter):
    def spectrum_criterion(self, R2):
        return R2 >= self.val ** 2


class LowpassSpectrumFilter(SpectrumFilter):
    def spectrum_criterion(self, R2):
        return R2 <= self.val ** 2


class GaussianHighpassSpectrumFilter(SpectrumFilter):
    def spectrum_criterion(self, R2):
        return 1 - np.exp(-self.val * R2)


class GaussianLowpassSpectrumFilter(SpectrumFilter):
    def spectrum_criterion(self, R2):
        return np.exp(-self.val * R2)


class CircularSpectrumFilter(SpectrumFilter):
    def __init__(self, r1, r2, include=False):
        self.r1, self.r2 = sorted([r1, r2])
        self.include = include

    def spectrum_criterion(self, R2):
        # r1 and r2 define a ring (annulus) in the spectrum, where r1 < r2.
        on_disc = np.logical_and(R2 >= self.r1 ** 2, R2 <= self.r2 ** 2)
        # If include is True, keep the ring and filter out everything else.
        # Else, filter out the ring.
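        # E.g. CircularSpectrumFilter(100, 130) with include=False removes the
        # ring 100 <= r <= 130 from the spectrum, essentially reproducing the
        # manual ring-removal filter used in Example 1 above.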
        if self.include:
            return on_disc
        else:
            return np.logical_not(on_disc)


# In[15]:


# You may also play around with these images: (last tested January 22nd 2021)
# img = io.imread('https://upload.wikimedia.org/wikipedia/commons/5/50/Albert_Einstein_%28Nobel%29.png', True)
img = io.imread(
    "https://upload.wikimedia.org/wikipedia/en/4/42/Richard_Feynman_Nobel.jpg", True
)
# img = io.imread('https://www.numfys.net/media/notebooks/images/dog.jpg', as_gray=True)
# img = io.imread('https://upload.wikimedia.org/wikipedia/commons/c/cf/Dirac_4.jpg', True)
# img = io.imread('https://upload.wikimedia.org/wikipedia/commons/7/73/Stephen_Hawking_in_Cambridge.jpg', True)


# In[16]:


ghp_filter = GaussianHighpassSpectrumFilter(0.005)
hp_filter = HighpassSpectrumFilter(10)

ghp_filter.render_and_visualize(img)
hp_filter.render_and_visualize(img);


# In[17]:


glp_filter = GaussianLowpassSpectrumFilter(0.005)
lp_filter = LowpassSpectrumFilter(10)

glp_filter.render_and_visualize(img)
# We use the __call__ method.
lp_filter(img);


# In[18]:


circular_filter = CircularSpectrumFilter(100, 130)
img_w_noise = io.imread(
    "https://www.numfys.net/media/notebooks/images/dog_noise1.jpg", True
)
circular_filter(img_w_noise);


# One advantage of our class-based approach is that extending our machinery is simple.
# We create two new types of filters to demonstrate the extensibility of our system.

# In[19]:


class ContrastFilter(Filter):
    """A very naive implementation of a contrast filter."""

    def render(self, image):
        x = self.val
        self.image = np.clip(image * x + 0.5 * (1 - x), 0, 1)
        return self.image


class CombineFilter(Filter):
    """A filter which executes an arbitrary number of other filters in sequence."""

    def __init__(self, filter_1, filter_2, *args):
        self.filters = [filter_1, filter_2, *args]

    def render(self, image):
        self.image = image
        for filter in self.filters:
            self.image = filter.render(self.image)
        return self.image

    def render_and_visualize_step(self, image):
        self.image = image
        self.spectrum = FFT(image)
        self.visualize()
        for filter in self.filters:
            print(filter)
            self.image = filter.render_and_visualize(self.image)
        return self.image

    def __add__(self, other_filter):
        self.filters.append(other_filter)
        return self


def __add__(self, other_filter):
    return CombineFilter(self, other_filter)


setattr(Filter, "__add__", __add__)


# In[20]:


cf = ContrastFilter(1.5)
cf(img);


# In[21]:


blur_contrast = CombineFilter(glp_filter, cf)
blur_contrast_2 = glp_filter + cf  # The same as above, but using the overridden operator.

blur_contrast(img)
blur_contrast_2(img);


# Lastly, see if you can work out from the following code why `CombineFilter` got a different `__add__` method than the other filters.

# In[22]:


(glp_filter + cf + circular_filter)(img)
# Uncomment to see each step performed.
# (cf + circular_filter + glp_filter).render_and_visualize_step(img)
(cf + circular_filter + glp_filter)(img);


# ## Further work and improvements
# + How do we do this with a color image? *Hint: how does a computer store a color image?* A minimal per-channel sketch is given at the very end of the notebook.

# ### Further reading
# 
# All links accessed 28 October 2016.
# 
# * Robert Fisher, Simon Perkins, Ashley Walker and Erik Wolfart: Fourier Transform, http://homepages.inf.ed.ac.uk/rbf/HIPR2/fourier.htm, 2000.
# * Paul Bourke: Image Filtering in the Frequency Domain, http://paulbourke.net/miscellaneous/imagefilter/, 1998.
# * Steven Lehar: An Intuitive Explanation of Fourier Theory, http://cns-alumni.bu.edu/~slehar/fourier/fourier.html.
# * Multi-dimensional image processing using scipy: https://docs.scipy.org/doc/scipy-0.18.1/reference/ndimage.html
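# As a starting point for the colour question above, here is a minimal sketch that filters an RGB image channel by channel. It assumes the image loads as an (m, n, 3) RGB array (the dog image used earlier is a colour JPEG) and simply applies the same spectrum filter to each channel before recombining them.

# In[ ]:


# Load the image without converting it to grayscale, giving an (m, n, 3) RGB array
# with values in [0, 1] after division by 255.
img_rgb = io.imread("https://www.numfys.net/media/notebooks/images/dog.jpg") / 255.0
filtered_rgb = np.zeros_like(img_rgb)
for c in range(3):  # filter the R, G and B channels independently
    channel_spec = filter_spectrum(FFT(img_rgb[:, :, c]), "gaussian_lowpass", 0.001)
    filtered_rgb[:, :, c] = np.real(IFFT(channel_spec))
plt.imshow(np.clip(filtered_rgb, 0, 1))
plt.axis("off")
plt.show()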