#!/usr/bin/env python
# coding: utf-8

# ## Computing entropy of a random signal

# In[1]:

import numpy as np
import matplotlib.pyplot as plt
import spkit as sp

# In[2]:

x = np.random.rand(10000)   # uniform noise
y = np.random.randn(10000)  # Gaussian noise

# Shannon entropy
H_x = sp.entropy(x, alpha=1)
H_y = sp.entropy(y, alpha=1)

# Rényi entropy
Hr_x = sp.entropy(x, alpha=2)
Hr_y = sp.entropy(y, alpha=2)

# Joint, conditional, mutual, cross entropy and KL divergence
H_xy = sp.entropy_joint(x, y)
H_x1y = sp.entropy_cond(x, y)
H_y1x = sp.entropy_cond(y, x)
I_xy = sp.mutual_Info(x, y)
H_xy_cross = sp.entropy_cross(x, y)
D_xy = sp.entropy_kld(x, y)

print('Shannon entropy')
print('Entropy of x: H(x) = ', H_x)
print('Entropy of y: H(y) = ', H_y)
print('-')
print('Rényi entropy')
print('Entropy of x: H(x) = ', Hr_x)
print('Entropy of y: H(y) = ', Hr_y)
print('-')
print('Mutual Information I(x,y) = ', I_xy)
print('Joint Entropy H(x,y) = ', H_xy)
print('Conditional Entropy H(x|y) = ', H_x1y)
print('Conditional Entropy H(y|x) = ', H_y1x)
print('-')
print('Cross Entropy H(x,y) = ', H_xy_cross)
print('Kullback–Leibler divergence Dkl(x,y) = ', D_xy)

plt.figure(figsize=(12, 5))
plt.subplot(121)
sp.HistPlot(x, show=False)
plt.subplot(122)
sp.HistPlot(y, show=False)
plt.show()


# ## Entropy of EEG signal

# In[3]:

from spkit.data import load_data

# In[14]:

X, ch_names = load_data.eegSample()
print(X.shape)
print(ch_names)

# In[12]:

x1 = X[:, 0]  # 'AF3' - Frontal Lobe
x2 = X[:, 6]  # 'O1'  - Occipital Lobe

# Shannon entropy
H_x1 = sp.entropy(x1, alpha=1)
H_x2 = sp.entropy(x2, alpha=1)

# Rényi entropy
Hr_x1 = sp.entropy(x1, alpha=2)
Hr_x2 = sp.entropy(x2, alpha=2)

# Joint entropy
H_x1x2 = sp.entropy_joint(x1, x2)

# Conditional entropy
H_x12 = sp.entropy_cond(x1, x2)
H_x21 = sp.entropy_cond(x2, x1)

# Mutual information
I_x12 = sp.mutual_Info(x1, x2)

# Cross entropy
H_x12_cross = sp.entropy_cross(x1, x2)

# Kullback–Leibler divergence
D_x12 = sp.entropy_kld(x1, x2)

print('Shannon entropy')
print('Entropy of x1: H(x1) =\t ', H_x1)
print('Entropy of x2: H(x2) =\t ', H_x2)
print('-')
print('Rényi entropy')
print('Entropy of x1: H(x1) =\t ', Hr_x1)
print('Entropy of x2: H(x2) =\t ', Hr_x2)
print('-')
print('Joint Entropy H(x1,x2) =\t', H_x1x2)
print('Mutual Information I(x1,x2) =\t', I_x12)
print('Conditional Entropy H(x1|x2) =\t', H_x12)
print('Conditional Entropy H(x2|x1) =\t', H_x21)
print('-')
print('Cross Entropy H(x1,x2) =\t', H_x12_cross)
print('Kullback–Leibler divergence Dkl(x1,x2) =\t', D_x12)
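

# A minimal sanity-check sketch: for these quantities one expects
# I(x1,x2) ≈ H(x1) + H(x2) - H(x1,x2) and H(x1|x2) ≈ H(x1,x2) - H(x2).
# Small mismatches can appear depending on how each spkit estimator bins
# the data; this cell only reuses the values computed above.

# In[ ]:

# Mutual information vs. the entropy-based identity
print('I(x1,x2)                  =', I_x12)
print('H(x1) + H(x2) - H(x1,x2)  =', H_x1 + H_x2 - H_x1x2)
# Conditional entropy vs. the chain-rule identity
print('H(x1|x2)                  =', H_x12)
print('H(x1,x2) - H(x2)          =', H_x1x2 - H_x2)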
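

# A minimal sketch extending the example to every channel: Shannon (alpha=1)
# and Rényi (alpha=2) entropy computed per column of X (loaded above with
# load_data.eegSample()) and compared in a bar plot.

# In[ ]:

# Per-channel entropy estimates
H_sh = [sp.entropy(X[:, i], alpha=1) for i in range(X.shape[1])]  # Shannon
H_rn = [sp.entropy(X[:, i], alpha=2) for i in range(X.shape[1])]  # Rényi

idx = np.arange(X.shape[1])
plt.figure(figsize=(10, 4))
plt.bar(idx - 0.2, H_sh, width=0.4, label='Shannon (alpha=1)')
plt.bar(idx + 0.2, H_rn, width=0.4, label='Rényi (alpha=2)')
plt.xticks(idx, ch_names, rotation=45)
plt.ylabel('Entropy')
plt.legend()
plt.tight_layout()
plt.show()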