%pylab inline
from supportFunctions import *
import cPickle as pickle
import time
# Load the pre-pickled finger image data set.
# NOTE(review): pickle.load on an untrusted file can execute arbitrary code --
# acceptable here because the data file is produced by this project.
pickleFileName = 'data/fingerDataSet' + '.pickle'
# Context manager guarantees the handle is closed even if pickle.load raises
# (the original open/close pair leaked the file on error).
with open(pickleFileName, 'rb') as pickleFile:
    data = pickle.load(pickleFile)
Populating the interactive namespace from numpy and matplotlib
# Training set: return to the same 3 hand-picked images as before.
trainingExampleIndices = [7, 30, 46]
trainingExamples = [data[idx] for idx in trainingExampleIndices]
trainX, trainY = extractExamplesFromList(trainingExamples,
                                         whichImage='image1bit', dist=4)

# Hold out two more images for testing.
testingExampleIndices = [40, 41]
testingExamples = [data[idx] for idx in testingExampleIndices]
testX, testY = extractExamplesFromList(testingExamples,
                                       whichImage='image1bit', dist=4)
rules = []
numErrors = []
startTime = time.time()
for i in range(81):
yHat = (trainX[:,i] == 0)
numErrors.append(sum(abs(yHat-trainY)))
rules.append([i, 0])
yHat = (trainX[:,i] == 1)
numErrors.append(sum(abs(yHat-trainY)))
rules.append([i, 1])
timeElapsed = time.time()-startTime
print 'Time elapsed = ' + str(round(timeElapsed, 3)) + ' seconds.'
Time elapsed = 0.01 seconds.
#Find the rule with the fewest training errors:
np.array(numErrors).argmin()
81
Rule number 81 wins! (Each feature contributes two rules — value 0 then value 1 — so rule 81 is feature 40 with value 1.)
#Make red and blue colormaps:
# make_colormap is a project helper (supportFunctions); it appears to take an
# alternating list of RGB anchor colors and break points in [0, 1] -- TODO
# confirm its exact signature against supportFunctions.
import matplotlib.colors as mcolors
c = mcolors.ColorConverter().to_rgb
# bw: presumably white up to ~0.33, then blue; rw: white then red;
# rwb: red through white to blue (diverging).
bw = make_colormap([(1,1,1), (1,1,1), 0.33, c('blue'), c('blue'), 0.66, c('blue')])
rw = make_colormap([(1,1,1), (1,1,1), 0.33, c('red'), c('red'), 0.66, c('red')])
rwb = make_colormap([c('red'), c('red'), 0.33, (1,1,1), (1,1,1), 0.66, c('blue')])
# Visualize the winning rule: a 9x9 grid with only flat index 40 switched on
# (row 4, col 4 -- the center pixel of the window).
ruleVector = np.zeros(81)
ruleVector[40] = 1
fig = figure(0, (6,6))
imshow(ruleVector.reshape(9,9), interpolation = 'none', cmap = bw)
<matplotlib.image.AxesImage at 0x11111cbd0>
This rule classifies examples as fingers when they have a one in position 40 (shown above) — pretty cool!
## First, visualize matches:
# Show where the winning rule (feature 40 == 1) fires on each training image.
fig = figure(0, (10, 8))
for exampleNum in range(3):
    fig.add_subplot(1, 3, exampleNum + 1)
    imageDict = data[trainingExampleIndices[exampleNum]]
    X1, y1 = extractFeatures(imageDict, whichImage='image1bit', dist=4)
    im = makeGrayScale(imageDict)
    # Boolean mask of matching pixels, laid out as the original image grid.
    matchingImage = (X1[:, 40] == 1).reshape(imageDict['boxHeight'],
                                             imageDict['boxWidth'])
    # Paint every matching pixel pure green (R=0, G=1, B=0).
    for channel, channelValue in enumerate((0, 1, 0)):
        im[:, :, channel][matchingImage] = channelValue
    imshow(im)
    axis('off')
That's a lot of matches!
#Now, compute confustion matrix on training data:
matchingIndices = np.where(trainX[:,40]==1)
#Make yHat vector, tells us which pixels our rules predict to be fingers.
yHat = np.zeros(trainX.shape[0])
yHat[matchingIndices] = 1
#Count up False Negatives, False Positives, True Positives, and True Negatives
FN = np.sum(np.logical_and(trainY==1, yHat==0))
FP = np.sum(np.logical_and(trainY==0, yHat==1))
TP = np.sum(np.logical_and(trainY==1, yHat==1))
TN = np.sum(np.logical_and(trainY==0, yHat==0))
#make confusion matrix:
confusionMatrix = np.array([[TP, FN], [FP, TN]])
print confusionMatrix
[[ 479 16] [1798 5574]]
accuracy = float(TP+TN)/float(trainX.shape[0])
recall = float(TP)/(TP+FN)
precision = float(TP)/(TP+FP)
print accuracy, recall, precision
0.769416550146 0.967676767677 0.210364514712
## First, visualize matches:
# Show where the winning rule (feature 40 == 1) fires on each testing image.
fig = figure(0, (10, 8))
for exampleNum in range(2):
    fig.add_subplot(1, 3, exampleNum + 1)
    imageDict = data[testingExampleIndices[exampleNum]]
    X1, y1 = extractFeatures(imageDict, whichImage='image1bit', dist=4)
    im = makeGrayScale(imageDict)
    # Boolean mask of matching pixels, laid out as the original image grid.
    matchingImage = (X1[:, 40] == 1).reshape(imageDict['boxHeight'],
                                             imageDict['boxWidth'])
    # Paint every matching pixel pure green (R=0, G=1, B=0).
    for channel, channelValue in enumerate((0, 1, 0)):
        im[:, :, channel][matchingImage] = channelValue
    imshow(im, interpolation='none')
    axis('off')
#Now, compute confustion matrix on training data:
matchingIndices = np.where(testX[:,40]==1)
#Make yHat vector, tells us which pixels our rules predict to be fingers.
yHat = np.zeros(testX.shape[0])
yHat[matchingIndices] = 1
#Count up False Negatives, False Positives, True Positives, and True Negatives
FN = np.sum(np.logical_and(testY==1, yHat==0))
FP = np.sum(np.logical_and(testY==0, yHat==1))
TP = np.sum(np.logical_and(testY==1, yHat==1))
TN = np.sum(np.logical_and(testY==0, yHat==0))
#make confusion matrix:
confusionMatrix = np.array([[TP, FN], [FP, TN]])
print confusionMatrix
[[ 372 24] [1123 3782]]
accuracy = float(TP+TN)/float(testX.shape[0])
recall = float(TP)/(TP+FN)
precision = float(TP)/(TP+FP)
print accuracy, recall, precision
0.783625730994 0.939393939394 0.248829431438
Woohoo! Our method generalizes! We really made a hand detector, not a finger detector. But hopefully it's a step in the right direction.