#!/usr/bin/env python
# coding: utf-8

# # Deep Learning from scratch
# Here is an implementation of a Neural Network from scratch, without using any ML libraries: only numpy is used for the NN, and matplotlib for plotting the results.
#
# **Objective**: The objective of this exercise is to understand what different layers learn, how different activation functions affect the learning rate and, importantly, what different neurons learn with different activation functions.

# ## Features
# **The implementation includes the following** (an illustrative optimizer sketch follows this list)
#
# * **Optimization**: Gradient Descent, Momentum, RMSprop, Adam (RMSprop + Momentum)
#
# * **Regularization**: L2 penalization, Dropout
#
# * **Activation functions**: Sigmoid, Tanh, ReLU, LeakyReLU, Softmax
# * **Datasets**: Two-class datasets (Gaussian, Linear, Moons, Spiral, Sinusoidal) and multiclass (Gaussian-distributed data, up to 9 classes)
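# The Adam optimizer used below (when `AdamOpt=True`) combines Momentum's running
# mean of gradients with RMSprop's running mean of squared gradients. Below is a
# minimal numpy sketch of one such update step; `adam_step` and its default
# hyperparameters are illustrative only, not the actual `deepNet` internals.

# In[ ]:


import numpy as np

def adam_step(W, dW, v, s, t, alpha=0.01, beta1=0.9, beta2=0.999, eps=1e-8):
    # v: running mean of gradients (Momentum term)
    # s: running mean of squared gradients (RMSprop term)
    v = beta1*v + (1 - beta1)*dW
    s = beta2*s + (1 - beta2)*dW**2
    v_hat = v / (1 - beta1**t)          # bias correction for early steps (t = 1, 2, ...)
    s_hat = s / (1 - beta2**t)
    W = W - alpha * v_hat / (np.sqrt(s_hat) + eps)
    return W, v, s

# Setting beta1=0 recovers RMSprop's gradient scaling; dropping the
# division by sqrt(s_hat) recovers plain Momentum.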
# ## All you need to import

# In[1]:


import numpy as np
import matplotlib.pyplot as plt
get_ipython().run_line_magic('matplotlib', 'notebook')

# DL library (code included)
from DeepNet import deepNet

# Toy datasets (simulated)
import DataSet as ds

# Other datasets
from sklearn import datasets


# ## Toy examples
# ### Example 1 (Moons, 2 classes)
# **Data**

# In[31]:


dtype = ['MOONS','GAUSSIANS','LINEAR','SINUSOIDAL','SPIRAL']

# Moons data
# Training: N=100 examples and no noise
Xr, yr, _ = ds.create_dataset(100, dtype[0], noise=0.0, varargin='PRESET')
# Testing: N=100 examples and 10% noise
Xs, ys, _ = ds.create_dataset(100, dtype[0], noise=0.1, varargin='PRESET')

print(Xr.shape, yr.shape, Xs.shape, ys.shape)
print('#Features: ', Xr.shape[0])
print('#Examples: ', Xr.shape[1])


# **Neural Network** :: Hidden Layers : [3,4]

# In[32]:


NN = deepNet(X=Xr, y=yr, Xts=Xs, yts=ys, Net=[3,4], NetAf=['tanh'], alpha=0.01,
             miniBatchSize=0.3, printCostAt=20, AdamOpt=True, lambd=0, keepProb=[1.0])


# **Training and plotting**

# In[33]:


get_ipython().run_line_magic('matplotlib', 'notebook')


# In[34]:


fig1 = plt.figure(1, figsize=(8,4))
fig2 = plt.figure(2, figsize=(8,5))

for i in range(20):        ## 20 times
    NN.fit(itr=10)         ## itr=10 iterations each time
    NN.PlotLCurve(pause=0)
    fig1.canvas.draw()
    NN.PlotBoundries(Layers=True, pause=0)
    fig2.canvas.draw()

NN.PlotLCurve()
NN.PlotBoundries(Layers=True)
print(NN)

yri, yrp = NN.predict(Xr)
ysi, ysp = NN.predict(Xs)
print('Training Accuracy ::', 100*np.sum(yri==yr)/yri.shape[1])
print('Testing Accuracy ::', 100*np.sum(ysi==ys)/ysi.shape[1])


# In[35]:


plt.close(fig1)
plt.close(fig2)


# ### Example 2 (Sinusoidal, 2 classes)
# **Data**

# In[7]:


dtype = ['MOONS','GAUSSIANS','LINEAR','SINUSOIDAL','SPIRAL']

# Training: N=200 examples and no noise
Xr, yr, _ = ds.create_dataset(200, dtype[3], 0.0, varargin='PRESET')
# Testing: N=200 examples and 10% noise
Xs, ys, _ = ds.create_dataset(200, dtype[3], 0.1, varargin='PRESET')

print(Xr.shape, yr.shape, Xs.shape, ys.shape)
print('#Features: ', Xr.shape[0])
print('#Examples: ', Xr.shape[1])


# **Neural Network** :: Hidden Layers : [8,8,5]

# In[8]:


NN = deepNet(X=Xr, y=yr, Xts=Xs, yts=ys, Net=[8,8,5], NetAf=['tanh'], alpha=0.01,
             miniBatchSize=0.3, printCostAt=100, AdamOpt=True, lambd=0, keepProb=[1.0])


# **Training and plotting**

# In[10]:


get_ipython().run_line_magic('matplotlib', 'notebook')


# In[9]:


plt.close(fig1)
plt.close(fig2)
fig1 = plt.figure(1, figsize=(8,4))
fig2 = plt.figure(2, figsize=(8,5))

for i in range(20):        ## 20 times
    NN.fit(itr=10)         ## itr=10 iterations each time
    NN.PlotLCurve(pause=0)
    fig1.canvas.draw()
    NN.PlotBoundries(Layers=True, pause=0)
    fig2.canvas.draw()

NN.PlotLCurve()
NN.PlotBoundries(Layers=True)
print(NN)

yri, yrp = NN.predict(Xr)
ysi, ysp = NN.predict(Xs)
print('Training Accuracy ::', 100*np.sum(yri==yr)/yri.shape[1])
print('Testing Accuracy ::', 100*np.sum(ysi==ys)/ysi.shape[1])


# In[10]:


plt.close(fig1)
plt.close(fig2)


# ### Example 3 (Gaussian, 4 classes)
# **Data** (70-30 split)

# In[11]:


X, y = ds.mclassGaus(N=500, nClasses=4, var=0.25, ShowPlot=False)
[n, N] = X.shape
r = np.random.permutation(N)
split = int(0.7*N)
Xr = X[:, r[:split]]
yr = y[:, r[:split]]
Xs = X[:, r[split:]]
ys = y[:, r[split:]]
print(Xr.shape, yr.shape, Xs.shape, ys.shape)
print('#Features: ', Xr.shape[0])
print('#Examples: ', Xr.shape[1])


# **Neural Network** :: Hidden Layers : [8,8,5]

# In[12]:


NN = deepNet(X=Xr, y=yr, Xts=Xs, yts=ys, Net=[8,8,5], NetAf=['tanh'], alpha=0.01,
             miniBatchSize=0.3, printCostAt=-1, AdamOpt=True, lambd=0, keepProb=[1.0])


# In[13]:


plt.close(fig1)
plt.close(fig2)
fig1 = plt.figure(1, figsize=(8,4))
fig2 = plt.figure(2, figsize=(8,5))

for i in range(20):        ## 20 times
    NN.fit(itr=10)         ## itr=10 iterations each time
    NN.PlotLCurve(pause=0)
    fig1.canvas.draw()
    NN.PlotBoundries(Layers=True, pause=0)
    fig2.canvas.draw()

NN.PlotLCurve()
NN.PlotBoundries(Layers=True)
print(NN)

yri, yrp = NN.predict(Xr)
ysi, ysp = NN.predict(Xs)
print('Training Accuracy ::', 100*np.sum(yri==yr)/yri.shape[1])
print('Testing Accuracy ::', 100*np.sum(ysi==ys)/ysi.shape[1])


# In[14]:


plt.close(fig1)
plt.close(fig2)


# ### Repeating example 3 with ReLU activation

# In[15]:


print(Xr.shape, yr.shape, Xs.shape, ys.shape)
print('#Features: ', Xr.shape[0])
print('#Examples: ', Xr.shape[1])

NN = deepNet(X=Xr, y=yr, Xts=Xs, yts=ys, Net=[8,8,5], NetAf=['relu'], alpha=0.01,
             miniBatchSize=0.3, printCostAt=-1, AdamOpt=True, lambd=0, keepProb=[1.0])

plt.close(fig1)
plt.close(fig2)
fig1 = plt.figure(1, figsize=(8,4))
fig2 = plt.figure(2, figsize=(8,5))

for i in range(20):        ## 20 times
    NN.fit(itr=10)         ## itr=10 iterations each time
    NN.PlotLCurve(pause=0)
    fig1.canvas.draw()
    NN.PlotBoundries(Layers=True, pause=0)
    fig2.canvas.draw()

NN.PlotLCurve()
NN.PlotBoundries(Layers=True)
print(NN)

yri, yrp = NN.predict(Xr)
ysi, ysp = NN.predict(Xs)
print('Training Accuracy ::', 100*np.sum(yri==yr)/yri.shape[1])
print('Testing Accuracy ::', 100*np.sum(ysi==ys)/ysi.shape[1])


# In[16]:


plt.close(fig1)
plt.close(fig2)
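# Tanh and ReLU differ most in their gradients: tanh saturates for large |z|,
# while ReLU passes a constant gradient for z > 0, which is one reason the two
# runs above learn at different rates. Below is a minimal numpy sketch of the
# two activations and their derivatives (illustrative only, not the deepNet
# internals).

# In[ ]:


import numpy as np

def tanh_af(z):
    return np.tanh(z)

def tanh_grad(z):
    return 1.0 - np.tanh(z)**2        # -> 0 as |z| grows (saturation)

def relu_af(z):
    return np.maximum(0.0, z)

def relu_grad(z):
    return (z > 0).astype(float)      # 1 for z > 0, 0 otherwise (no saturation)

z = np.linspace(-3, 3, 7)
print(np.round(tanh_grad(z), 3))      # small at the ends: saturated units learn slowly
print(relu_grad(z))                   # full gradient wherever the unit is active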
# ## Real-world examples
# ### Digits dataset (10 classes, MNIST-like 8x8 images from scikit-learn)

# In[36]:


Xy = datasets.load_digits()
X = Xy['data']
y = Xy['target']
print(X.shape, y.shape)


# In[37]:


fig = plt.figure(1, figsize=(10,1))
for i in range(10):
    plt.subplot(1, 10, i+1)
    plt.imshow(X[i].reshape([8,8]), cmap='gray', aspect='auto')
    plt.title('y :' + str(y[i]))
    plt.axis('off')
plt.subplots_adjust(top=0.8, wspace=0.12, hspace=0)
plt.show()


# In[38]:


plt.close(fig)


# In[20]:


N = X.shape[0]   # total examples
r = np.random.permutation(N)
split = int(0.7*N)
Xr = X[r[:split],:].T
yr = y[r[:split]][None,:]
Xs = X[r[split:],:].T
ys = y[r[split:]][None,:]
print(Xr.shape, yr.shape, Xs.shape, ys.shape)
print('#Features: ', Xr.shape[0])
print('#Examples: ', Xr.shape[1])


# In[21]:


NN = deepNet(X=Xr, y=yr, Xts=Xs, yts=ys, Net=[8,8,5], NetAf=['relu'], alpha=0.01,
             miniBatchSize=0.3, printCostAt=10, AdamOpt=True, lambd=0, keepProb=[1.0])


# In[22]:


plt.close(fig)
fig = plt.figure(1, figsize=(8,4))
for i in range(100):
    NN.fit(itr=2)
    NN.PlotLCurve(pause=0)
    fig.canvas.draw()

NN.PlotLCurve()
print(NN)

yri, yrp = NN.predict(Xr)
ysi, ysp = NN.predict(Xs)
print('Training Accuracy ::', 100*np.sum(yri==yr)/yri.shape[1])
print('Testing Accuracy ::', 100*np.sum(ysi==ys)/ysi.shape[1])


# In[23]:


plt.close(fig)


# ### Breast Cancer dataset (2 classes)

# In[24]:


Xy = datasets.load_breast_cancer()
X = Xy['data']
y = Xy['target']
print(X.shape, y.shape)


# In[25]:


N = X.shape[0]   # total examples
r = np.random.permutation(N)
split = int(0.7*N)
Xr = X[r[:split],:].T
yr = y[r[:split]][None,:]
Xs = X[r[split:],:].T
ys = y[r[split:]][None,:]
print(Xr.shape, yr.shape, Xs.shape, ys.shape)
print('#Features: ', Xr.shape[0])
print('#Examples: ', Xr.shape[1])


# **Normalizing Data**: each feature is standardized with the training-set mean and
# standard deviation; the same training statistics are applied to the test set.

# In[26]:


mn = np.mean(Xr, axis=1)[:,None]
sd = np.std(Xr, axis=1)[:,None]
Xrn = (Xr - mn)/sd
Xsn = (Xs - mn)/sd
print(Xrn.shape, yr.shape, Xsn.shape, ys.shape)


# In[27]:


NN = deepNet(X=Xrn, y=yr, Xts=Xsn, yts=ys, Net=[8,8,5], NetAf=['relu'], alpha=0.01,
             miniBatchSize=0.3, printCostAt=10, AdamOpt=True, lambd=0, keepProb=[1.0])


# In[28]:


plt.close(fig)
fig = plt.figure(1, figsize=(8,4))
for i in range(100):
    NN.fit(itr=2)
    NN.PlotLCurve(pause=0)
    fig.canvas.draw()

NN.PlotLCurve()
print(NN)

yri, yrp = NN.predict(Xrn)
ysi, ysp = NN.predict(Xsn)
print('Training Accuracy ::', 100*np.sum(yri==yr)/yri.shape[1])
print('Testing Accuracy ::', 100*np.sum(ysi==ys)/ysi.shape[1])


# In[29]:


plt.close(fig)


# **Nikesh Bajaj**
#
# Email:
# n.bajaj@qmul.ac.uk
# bajaj.nikkey@gmail.com
# http://nikeshbajaj.in