!wget https://storage.googleapis.com/aibootcamp/data/plants.zip
--2018-03-29 22:35:01-- https://storage.googleapis.com/aibootcamp/data/plants.zip Resolving storage.googleapis.com (storage.googleapis.com)... 74.125.141.128, 2607:f8b0:400c:c06::80 Connecting to storage.googleapis.com (storage.googleapis.com)|74.125.141.128|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 1808677454 (1.7G) [application/zip] Saving to: ‘plants.zip’ plants.zip 100%[===================>] 1.68G 138MB/s in 15s 2018-03-29 22:35:17 (113 MB/s) - ‘plants.zip’ saved [1808677454/1808677454]
!ls
datalab plants.zip
!unzip -q plants.zip
!ls
datalab plants plants.zip
!rm plants.zip
!unzip -q plants/train.zip
!ls
datalab plants train
!pip install Keras
Requirement already satisfied: Keras in /usr/local/lib/python3.6/dist-packages Requirement already satisfied: numpy>=1.9.1 in /usr/local/lib/python3.6/dist-packages (from Keras) Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras) Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras) Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from Keras)
!pip install tqdm
Collecting tqdm Downloading tqdm-4.19.9-py2.py3-none-any.whl (52kB) 100% |████████████████████████████████| 61kB 3.0MB/s Installing collected packages: tqdm Successfully installed tqdm-4.19.9
def fibonacci_generator():
    """Yield the Fibonacci sequence 0, 1, 1, 2, 3, 5, ... indefinitely."""
    current, following = 0, 1
    while True:
        yield current
        current, following = following, current + following
fib_gen = fibonacci_generator()
next(fib_gen)
0
next(fib_gen)
1
next(fib_gen)
1
next(fib_gen)
2
next(fib_gen)
3
import os
from tqdm import tqdm

# Carve a small validation set out of 'train' by MOVING 12 images per class
# into a mirrored 'validation' directory tree.
root_dir = 'train'
target_root = 'validation'
if not os.path.isdir(target_root):
    os.mkdir(target_root)
for plant in tqdm(os.listdir(root_dir)):
    plant_path = os.path.join(root_dir, plant)
    target_plant_path = os.path.join(target_root, plant)
    if not os.path.isdir(target_plant_path):
        os.mkdir(target_plant_path)
    files = os.listdir(plant_path)
    # FIX: slice instead of range(12) indexing -- a class with fewer than 12
    # images no longer raises IndexError; it just contributes what it has.
    for fname in files[:12]:
        os.rename(os.path.join(plant_path, fname),
                  os.path.join(target_plant_path, fname))
100%|██████████| 12/12 [00:00<00:00, 1099.04it/s]
!ls validation/Black-grass
163c13912.png 594485a0c.png b024eeb75.png e2b2a20b2.png 2f6bc240a.png 686132594.png b937353c0.png ebd2350df.png 39e9bf4c7.png 9443199bb.png d0ad9c78b.png f84089a55.png
import datetime
import numpy as np
from keras.preprocessing.image import ImageDataGenerator
import seaborn as sns
sns.set(); np.random.seed(0)
%matplotlib inline
from tqdm import tqdm
import os
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
import cv2
# Keras generator: rescales uint8 pixels into [0, 1]; class labels are
# inferred from the sub-directory names under 'train'.
imgen = ImageDataGenerator(rescale=1/255)
train_generator = imgen.flow_from_directory('train',batch_size=32, target_size=(150,150))
Found 4606 images belonging to 12 classes.
# Validation generator shares the same rescaling imgen instance.
validation_generator = imgen.flow_from_directory('validation',
batch_size=32,
target_size=(150,150))
Found 144 images belonging to 12 classes.
from keras.layers import Flatten,Dense, Activation
from keras.models import Sequential
# Baseline: a single softmax layer on raw pixels -- effectively multinomial
# logistic regression over 150*150*3 inputs, trained with plain SGD.
model = Sequential([
    Flatten(input_shape=(150, 150, 3)),
    Dense(12),
    Activation('softmax'),
])
model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['acc'])
model.fit_generator(
    train_generator,
    epochs=10,
    steps_per_epoch=5515 // 32,
    validation_data=validation_generator,
    validation_steps=144 // 32,
)
Epoch 1/10 172/172 [==============================] - 68s 397ms/step - loss: 12.6801 - acc: 0.1080 - val_loss: 13.2599 - val_acc: 0.1562 Epoch 2/10 172/172 [==============================] - 68s 396ms/step - loss: 12.2760 - acc: 0.1309 - val_loss: 12.1587 - val_acc: 0.1406 Epoch 3/10 123/172 [====================>.........] - ETA: 18s - loss: 11.8094 - acc: 0.1567172/172 [==============================] - 67s 387ms/step - loss: 11.7950 - acc: 0.1533 - val_loss: 10.7529 - val_acc: 0.1250 Epoch 4/10 172/172 [==============================] - 66s 386ms/step - loss: 11.6447 - acc: 0.1659 - val_loss: 9.5564 - val_acc: 0.2422 Epoch 5/10 169/172 [============================>.] - ETA: 1s - loss: 11.1230 - acc: 0.1674172/172 [==============================] - 68s 393ms/step - loss: 11.1044 - acc: 0.1680 - val_loss: 8.7388 - val_acc: 0.2109 Epoch 6/10 172/172 [==============================] - 65s 379ms/step - loss: 10.5921 - acc: 0.1728 - val_loss: 8.5795 - val_acc: 0.2422 Epoch 7/10 172/172 [==============================] - 67s 390ms/step - loss: 10.3695 - acc: 0.2180 - val_loss: 10.3935 - val_acc: 0.1797 Epoch 8/10 6/172 [>.............................] - ETA: 28s - loss: 11.1469 - acc: 0.2031172/172 [==============================] - 66s 385ms/step - loss: 10.3658 - acc: 0.2425 - val_loss: 9.2590 - val_acc: 0.2188 Epoch 9/10 172/172 [==============================] - 67s 388ms/step - loss: 9.9292 - acc: 0.2372 - val_loss: 8.8339 - val_acc: 0.1953 Epoch 10/10 143/172 [=======================>......] - ETA: 11s - loss: 9.6683 - acc: 0.2748155/172 [==========================>...] - ETA: 6s - loss: 9.7139 - acc: 0.2749
from keras.applications.vgg16 import VGG16
Using TensorFlow backend.
vgg_model = VGG16(include_top=False,input_shape=(150,150,3))
Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5 58892288/58889256 [==============================] - 1s 0us/step
vgg_model.summary()
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_1 (InputLayer) (None, 150, 150, 3) 0 _________________________________________________________________ block1_conv1 (Conv2D) (None, 150, 150, 64) 1792 _________________________________________________________________ block1_conv2 (Conv2D) (None, 150, 150, 64) 36928 _________________________________________________________________ block1_pool (MaxPooling2D) (None, 75, 75, 64) 0 _________________________________________________________________ block2_conv1 (Conv2D) (None, 75, 75, 128) 73856 _________________________________________________________________ block2_conv2 (Conv2D) (None, 75, 75, 128) 147584 _________________________________________________________________ block2_pool (MaxPooling2D) (None, 37, 37, 128) 0 _________________________________________________________________ block3_conv1 (Conv2D) (None, 37, 37, 256) 295168 _________________________________________________________________ block3_conv2 (Conv2D) (None, 37, 37, 256) 590080 _________________________________________________________________ block3_conv3 (Conv2D) (None, 37, 37, 256) 590080 _________________________________________________________________ block3_pool (MaxPooling2D) (None, 18, 18, 256) 0 _________________________________________________________________ block4_conv1 (Conv2D) (None, 18, 18, 512) 1180160 _________________________________________________________________ block4_conv2 (Conv2D) (None, 18, 18, 512) 2359808 _________________________________________________________________ block4_conv3 (Conv2D) (None, 18, 18, 512) 2359808 _________________________________________________________________ block4_pool (MaxPooling2D) (None, 9, 9, 512) 0 _________________________________________________________________ block5_conv1 (Conv2D) (None, 9, 9, 512) 2359808 _________________________________________________________________ 
block5_conv2 (Conv2D) (None, 9, 9, 512) 2359808 _________________________________________________________________ block5_conv3 (Conv2D) (None, 9, 9, 512) 2359808 _________________________________________________________________ block5_pool (MaxPooling2D) (None, 4, 4, 512) 0 ================================================================= Total params: 14,714,688 Trainable params: 14,714,688 Non-trainable params: 0 _________________________________________________________________
# Transfer learning: freeze every VGG16 conv layer so only the new softmax
# classifier head added below will be trained.
for layer in vgg_model.layers:
    layer.trainable = False

finetune = Sequential(layers=vgg_model.layers)
for head_layer in (Flatten(), Dense(12), Activation('softmax')):
    finetune.add(head_layer)
finetune.summary()
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_1 (InputLayer) (None, 150, 150, 3) 0 _________________________________________________________________ block1_conv1 (Conv2D) (None, 150, 150, 64) 1792 _________________________________________________________________ block1_conv2 (Conv2D) (None, 150, 150, 64) 36928 _________________________________________________________________ block1_pool (MaxPooling2D) (None, 75, 75, 64) 0 _________________________________________________________________ block2_conv1 (Conv2D) (None, 75, 75, 128) 73856 _________________________________________________________________ block2_conv2 (Conv2D) (None, 75, 75, 128) 147584 _________________________________________________________________ block2_pool (MaxPooling2D) (None, 37, 37, 128) 0 _________________________________________________________________ block3_conv1 (Conv2D) (None, 37, 37, 256) 295168 _________________________________________________________________ block3_conv2 (Conv2D) (None, 37, 37, 256) 590080 _________________________________________________________________ block3_conv3 (Conv2D) (None, 37, 37, 256) 590080 _________________________________________________________________ block3_pool (MaxPooling2D) (None, 18, 18, 256) 0 _________________________________________________________________ block4_conv1 (Conv2D) (None, 18, 18, 512) 1180160 _________________________________________________________________ block4_conv2 (Conv2D) (None, 18, 18, 512) 2359808 _________________________________________________________________ block4_conv3 (Conv2D) (None, 18, 18, 512) 2359808 _________________________________________________________________ block4_pool (MaxPooling2D) (None, 9, 9, 512) 0 _________________________________________________________________ block5_conv1 (Conv2D) (None, 9, 9, 512) 2359808 _________________________________________________________________ 
block5_conv2 (Conv2D) (None, 9, 9, 512) 2359808 _________________________________________________________________ block5_conv3 (Conv2D) (None, 9, 9, 512) 2359808 _________________________________________________________________ block5_pool (MaxPooling2D) (None, 4, 4, 512) 0 _________________________________________________________________ flatten_1 (Flatten) (None, 8192) 0 _________________________________________________________________ dense_1 (Dense) (None, 12) 98316 _________________________________________________________________ activation_1 (Activation) (None, 12) 0 ================================================================= Total params: 14,813,004 Trainable params: 98,316 Non-trainable params: 14,714,688 _________________________________________________________________
# Adam converges quickly here since only the ~98k head parameters train.
finetune.compile(optimizer='adam',
                 loss='categorical_crossentropy',
                 metrics=['acc'])
finetune.fit_generator(train_generator,
                       validation_data=validation_generator,
                       epochs=10,
                       steps_per_epoch=5515 // 32,
                       validation_steps=144 // 32)
Epoch 1/10 172/172 [==============================] - 74s 430ms/step - loss: 1.5665 - acc: 0.4871 - val_loss: 1.2699 - val_acc: 0.5625 Epoch 2/10 172/172 [==============================] - 71s 412ms/step - loss: 0.9621 - acc: 0.7154 - val_loss: 1.0961 - val_acc: 0.6328 Epoch 3/10 125/172 [====================>.........] - ETA: 17s - loss: 0.7842 - acc: 0.7728172/172 [==============================] - 69s 401ms/step - loss: 0.7534 - acc: 0.7862 - val_loss: 0.9763 - val_acc: 0.7266 Epoch 4/10 172/172 [==============================] - 72s 418ms/step - loss: 0.6250 - acc: 0.8218 - val_loss: 0.9774 - val_acc: 0.7188 Epoch 5/10 169/172 [============================>.] - ETA: 1s - loss: 0.5394 - acc: 0.8550172/172 [==============================] - 71s 413ms/step - loss: 0.5394 - acc: 0.8550 - val_loss: 0.8934 - val_acc: 0.7188 Epoch 6/10 172/172 [==============================] - 71s 413ms/step - loss: 0.4618 - acc: 0.8822 - val_loss: 0.8831 - val_acc: 0.6797 Epoch 7/10 172/172 [==============================] - 70s 406ms/step - loss: 0.4150 - acc: 0.8983 - val_loss: 0.8946 - val_acc: 0.7500 Epoch 8/10 4/172 [..............................] - ETA: 30s - loss: 0.3939 - acc: 0.9141172/172 [==============================] - 71s 411ms/step - loss: 0.3674 - acc: 0.9127 - val_loss: 0.9342 - val_acc: 0.7266 Epoch 9/10 172/172 [==============================] - 70s 407ms/step - loss: 0.3516 - acc: 0.9182 - val_loss: 0.8781 - val_acc: 0.7344 Epoch 10/10 140/172 [=======================>......] - ETA: 12s - loss: 0.3077 - acc: 0.9292172/172 [==============================] - 70s 408ms/step - loss: 0.3057 - acc: 0.9297 - val_loss: 0.8750 - val_acc: 0.7344
<keras.callbacks.History at 0x7fe7ca3c7b70>
!pip install bcolz
Collecting bcolz Downloading bcolz-1.2.0.tar.gz (1.4MB) 100% |████████████████████████████████| 1.4MB 906kB/s Requirement already satisfied: numpy>=1.7 in /usr/local/lib/python3.6/dist-packages (from bcolz) Building wheels for collected packages: bcolz Running setup.py bdist_wheel for bcolz ... - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | / - \ | done Stored in directory: /content/.cache/pip/wheels/78/ba/ff/0ac6555a3e174aa78eb8cf32c798a5bde0de5a2bc10c654e9b Successfully built bcolz Installing collected packages: bcolz Successfully installed bcolz-1.2.0
import bcolz
def save_array(fname, arr):
    """Persist *arr* on disk as a bcolz carray rooted at directory *fname*."""
    bcolz.carray(arr, rootdir=fname, mode='w').flush()


def load_array(fname):
    """Read a carray written by save_array() back into memory as an ndarray."""
    return bcolz.open(fname)[:]
# Precompute frozen-VGG16 feature maps for every training image so the
# classifier head can later be trained without re-running the conv stack.
source = 'train'
target = 'train_proc'
if not os.path.isdir(target):
    os.mkdir(target)
for plant in os.listdir(source):
    target_path = os.path.join(target, plant)
    if not os.path.isdir(target_path):
        os.mkdir(target_path)
    source_path = os.path.join(source, plant)
    print('Processing', plant)
    for file in tqdm(os.listdir(source_path)):
        img = cv2.imread(os.path.join(source_path, file))
        img = cv2.resize(img, (150, 150))
        # BUG FIX: normalise to [0, 1] before feature extraction. The
        # validation pass below does `img / 255` and both Keras generators
        # use rescale=1/255; without this line the train and validation
        # features come from different input distributions.
        img = img / 255
        img = np.expand_dims(img, 0)
        out = vgg_model.predict(img)
        save_array(os.path.join(target_path, file), out)
2%|▏ | 4/246 [00:00<00:06, 37.05it/s]
Processing Common wheat
100%|██████████| 246/246 [00:04<00:00, 49.27it/s] 1%| | 4/452 [00:00<00:11, 39.80it/s]
Processing Sugar beet
100%|██████████| 452/452 [00:09<00:00, 47.04it/s] 2%|▏ | 5/248 [00:00<00:05, 48.52it/s]
Processing Maize
100%|██████████| 248/248 [00:05<00:00, 46.39it/s] 1%| | 6/704 [00:00<00:12, 54.62it/s]
Processing Common Chickweed
100%|██████████| 704/704 [00:13<00:00, 53.49it/s] 2%|▏ | 6/321 [00:00<00:07, 42.84it/s]
Processing Black-grass
100%|██████████| 321/321 [00:07<00:00, 41.90it/s] 1%| | 6/807 [00:00<00:16, 48.49it/s]
Processing Loose Silky-bent
100%|██████████| 807/807 [00:16<00:00, 48.58it/s] 2%|▏ | 6/264 [00:00<00:04, 52.53it/s]
Processing Shepherd’s Purse
100%|██████████| 264/264 [00:05<00:00, 50.62it/s] 1%| | 6/531 [00:00<00:09, 54.17it/s]
Processing Fat Hen
100%|██████████| 531/531 [00:10<00:00, 50.98it/s] 1%|▏ | 5/336 [00:00<00:06, 49.98it/s]
Processing Cleavers
100%|██████████| 336/336 [00:06<00:00, 52.02it/s] 1%| | 6/596 [00:00<00:11, 52.17it/s]
Processing Scentless Mayweed
100%|██████████| 596/596 [00:11<00:00, 51.77it/s] 1%| | 6/568 [00:00<00:10, 54.03it/s]
Processing Small-flowered Cranesbill
100%|██████████| 568/568 [00:11<00:00, 50.35it/s] 1%| | 5/442 [00:00<00:10, 42.16it/s]
Processing Charlock
100%|██████████| 442/442 [00:09<00:00, 46.28it/s]
# Same feature-extraction pass for the validation images: resize, normalise
# to [0, 1], run through frozen VGG16, persist the feature map with bcolz.
source = 'validation'
root_dir = 'validation_proc'
if not os.path.isdir(root_dir):
    os.mkdir(root_dir)
for plant in os.listdir(source):
    target_path = os.path.join(root_dir, plant)
    if not os.path.isdir(target_path):
        os.mkdir(target_path)
    source_path = os.path.join(source, plant)
    print('Processing', plant)
    for file in tqdm(os.listdir(source_path)):
        image = cv2.resize(cv2.imread(os.path.join(source_path, file)),
                           (150, 150))
        image = np.expand_dims(image / 255, 0)
        save_array(os.path.join(target_path, file), vgg_model.predict(image))
33%|███▎ | 4/12 [00:00<00:00, 38.10it/s]
Processing Common wheat
100%|██████████| 12/12 [00:00<00:00, 37.00it/s] 33%|███▎ | 4/12 [00:00<00:00, 34.39it/s]
Processing Sugar beet
100%|██████████| 12/12 [00:00<00:00, 39.57it/s] 42%|████▏ | 5/12 [00:00<00:00, 45.71it/s]
Processing Maize
100%|██████████| 12/12 [00:00<00:00, 45.46it/s] 50%|█████ | 6/12 [00:00<00:00, 54.09it/s]
Processing Common Chickweed
100%|██████████| 12/12 [00:00<00:00, 54.49it/s] 50%|█████ | 6/12 [00:00<00:00, 53.44it/s]
Processing Black-grass
100%|██████████| 12/12 [00:00<00:00, 54.59it/s] 50%|█████ | 6/12 [00:00<00:00, 55.14it/s]
Processing Loose Silky-bent
100%|██████████| 12/12 [00:00<00:00, 52.45it/s] 50%|█████ | 6/12 [00:00<00:00, 54.87it/s]
Processing Shepherd’s Purse
100%|██████████| 12/12 [00:00<00:00, 54.33it/s] 50%|█████ | 6/12 [00:00<00:00, 56.31it/s]
Processing Fat Hen
100%|██████████| 12/12 [00:00<00:00, 55.02it/s] 50%|█████ | 6/12 [00:00<00:00, 54.79it/s]
Processing Cleavers
100%|██████████| 12/12 [00:00<00:00, 53.61it/s] 50%|█████ | 6/12 [00:00<00:00, 56.78it/s]
Processing Scentless Mayweed
100%|██████████| 12/12 [00:00<00:00, 54.80it/s] 50%|█████ | 6/12 [00:00<00:00, 54.69it/s]
Processing Small-flowered Cranesbill
100%|██████████| 12/12 [00:00<00:00, 53.06it/s] 42%|████▏ | 5/12 [00:00<00:00, 49.12it/s]
Processing Charlock
100%|██████████| 12/12 [00:00<00:00, 49.45it/s]
ls
datalab/ Segmented/ train_proc/ Validation/ validation_proc/
# Index the precomputed features: one (path, class-name) pair per item.
# Each bcolz rootdir keeps the original '<name>.png' naming, hence the filter.
root_dir = 'train_proc'
dirs = os.listdir(root_dir)
paths = []
targets = []
for dir in dirs:
    class_dir = os.path.join(root_dir, dir)
    for entry in os.listdir(class_dir):
        if entry.endswith(".png"):
            paths.append(os.path.join(class_dir, entry))
            targets.append(dir)
nclasses = len(np.unique(targets))
nitems = len(targets)
print('Found {} items belonging to {} classes'.format(nitems, nclasses))
Found 5515 items belonging to 12 classes
# String labels -> integer ids -> one-hot rows, plus a shuffled index order
# that determines the epoch iteration sequence.
labelenc = LabelEncoder()
int_targets = labelenc.fit_transform(targets).reshape(-1, 1)
onehot_enc = OneHotEncoder(sparse=False)
onehot_targets = onehot_enc.fit_transform(int_targets)
indices = np.arange(len(paths))
np.random.shuffle(indices)
def bcz_imgen(root_dir, batch_size = 32):
    """Endlessly yield (features, one_hot_labels) batches from a directory of
    bcolz arrays laid out as root_dir/<class_name>/<item>.

    Labels come from the class sub-directory names (LabelEncoder sorts them,
    so the mapping is stable across calls); item order is shuffled once at
    start-up and replayed every epoch.
    """
    file_paths, labels = [], []
    for class_name in os.listdir(root_dir):
        class_dir = os.path.join(root_dir, class_name)
        for item in os.listdir(class_dir):
            file_paths.append(os.path.join(class_dir, item))
            labels.append(class_name)
    encoded = LabelEncoder().fit_transform(labels).reshape(-1, 1)
    one_hot = OneHotEncoder(sparse=False).fit_transform(encoded)
    order = np.arange(len(file_paths))
    np.random.shuffle(order)
    while True:
        batch_x, batch_y = [], []
        for idx in order:
            batch_x.append(load_array(file_paths[idx]))
            batch_y.append(one_hot[idx])
            if len(batch_x) == batch_size:
                # Each stored array already has a leading batch axis of 1.
                yield np.concatenate(batch_x, axis=0), np.stack(batch_y)
                batch_x, batch_y = [], []
train_gen = bcz_imgen('train_proc')
val_gen = bcz_imgen('validation_proc')
# BUG FIX: the original called next(gen), but no name `gen` exists at this
# point in the notebook -- it raised NameError. Pull one batch from the
# training generator to sanity-check shapes instead.
yld, tar = next(train_gen)
tar.shape
(32, 12)
from keras.layers import Conv2D, Dropout

# Classifier head trained directly on the precomputed (4, 4, 512) VGG features.
model = Sequential()
# A Conv2D bottleneck + Dropout was tried here and left disabled:
#model.add(Conv2D(256,1,input_shape=(4,4,512)))
#model.add(Activation('relu'))
#model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(12))
model.add(Activation('softmax'))
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['acc'])
model.fit_generator(train_gen,
                    validation_data=val_gen,
                    epochs=2,
                    steps_per_epoch=5515 // 32,
                    validation_steps=144 // 32)
Epoch 1/2 172/172 [==============================] - 3s 15ms/step - loss: 1.6133 - acc: 0.7356 - val_loss: 3.0247 - val_acc: 0.1797 Epoch 2/2 172/172 [==============================] - 3s 17ms/step - loss: 0.3882 - acc: 0.9253 - val_loss: 3.2615 - val_acc: 0.2266
<keras.callbacks.History at 0x7f25d32cbe10>
os.listdir('train')
['Small-flowered Cranesbill', 'Sugar beet', 'Common Chickweed', 'Scentless Mayweed', 'Fat Hen', 'Shepherds Purse', 'Common wheat', 'Charlock', 'Cleavers', 'Black-grass', 'Maize', 'Loose Silky-bent']
!wget https://storage.googleapis.com/aibootcamp/data/plants.zip
--2018-03-25 18:49:23-- https://storage.googleapis.com/aibootcamp/data/plants.zip Resolving storage.googleapis.com (storage.googleapis.com)... 74.125.141.128, 2607:f8b0:400c:c06::80 Connecting to storage.googleapis.com (storage.googleapis.com)|74.125.141.128|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 1808677454 (1.7G) [application/zip] Saving to: ‘plants.zip’ plants.zip 100%[===================>] 1.68G 142MB/s in 12s 2018-03-25 18:49:36 (143 MB/s) - ‘plants.zip’ saved [1808677454/1808677454]
!unzip plants.zip
Archive: plants.zip creating: plants/ extracting: plants/sample_submission.csv.zip extracting: plants/train.zip extracting: plants/test.zip
!unzip plants/train.zip
def cieluv(img, target):
    """Per-pixel weighted colour distance between *img* (H x W x 3 array) and
    the single RGB colour *target*.

    Adapted from https://www.compuphase.com/cmetric.htm -- the final sqrt is
    omitted for speed, so compare against SQUARED thresholds.
    """
    pixels = img.astype('int')
    red, green, blue = pixels[:, :, 0], pixels[:, :, 1], pixels[:, :, 2]
    t_red, t_green, t_blue = target
    mean_red = ((red + t_red) / 2.).astype('int')
    d_red = np.square(red - t_red)
    d_green = np.square(green - t_green)
    d_blue = np.square(blue - t_blue)
    return ((((512 + mean_red) * d_red) >> 8)
            + 4 * d_green
            + (((767 - mean_red) * d_blue) >> 8))
import matplotlib.pyplot as plt
def ocv_imgen(root_dir, batch_size=32,
              rescale=1/255,
              target_size=(150, 150)):
    """Endlessly yield (images, one_hot_labels) batches from root_dir/<class>/<img>,
    masking probable background pixels via CIELUV colour distance and applying
    a median blur.

    rescale: multiplicative factor applied to each stacked batch; the default
    1/255 maps uint8 pixels into [0, 1], matching ImageDataGenerator(rescale=1/255).
    """
    paths = []
    targets = []
    for dir in os.listdir(root_dir):
        path = os.path.join(root_dir, dir)
        for file in os.listdir(path):
            paths.append(os.path.join(path, file))
            targets.append(dir)
    labelenc = LabelEncoder()
    int_targets = labelenc.fit_transform(targets).reshape(-1, 1)
    onehot_enc = OneHotEncoder(sparse=False)
    onehot_targets = onehot_enc.fit_transform(int_targets)
    indices = np.arange(len(paths))
    np.random.shuffle(indices)
    while True:
        image_stack = []
        target_stack = []
        for index in indices:
            path = paths[index]
            target = onehot_targets[index]
            # plt.imread returns floats in [0, 1] for PNG; map back to uint8
            # and drop any alpha channel.
            img = plt.imread(path)
            img = np.round(img * 255).astype('ubyte')[:, :, :3]
            # FIX: honour the target_size parameter (was hard-coded (150, 150)).
            img = cv2.resize(img, target_size)
            # Zero out pixels far (squared colour distance) from all four
            # reference plant-green colours -- crude background segmentation.
            img_filter = (
                (cieluv(img, (71, 86, 38)) > 1600)
                & (cieluv(img, (65, 79, 19)) > 1600)
                & (cieluv(img, (95, 106, 56)) > 1600)
                & (cieluv(img, (56, 63, 43)) > 500)
            )
            img[img_filter] = 0
            img = cv2.medianBlur(img, 9)
            image_stack.append(img)
            target_stack.append(target)
            if len(image_stack) == batch_size:
                images = np.stack(image_stack)
                # BUG FIX: the original did np.divide(images, rescale), i.e.
                # it MULTIPLIED by 255 (pixel values up to ~65025), which is
                # why the later finetune run plateaued at random-level
                # accuracy. Multiply by the rescale factor instead.
                images = images * rescale
                yield images, np.stack(target_stack)
                image_stack = []
                target_stack = []
# Re-train the VGG16-based model on the segmented/blurred images.
train_gen = ocv_imgen('train', batch_size=32)
val_gen = ocv_imgen('validation', batch_size=32)
finetune.fit_generator(train_gen,
                       validation_data=val_gen,
                       epochs=10,
                       steps_per_epoch=5515 // 32,
                       validation_steps=144 // 32)
Epoch 1/10 172/172 [==============================] - 149s 864ms/step - loss: 13.9050 - acc: 0.1312 - val_loss: 14.8809 - val_acc: 0.0547 Epoch 2/10 172/172 [==============================] - 149s 865ms/step - loss: 13.9473 - acc: 0.1286 - val_loss: 14.5041 - val_acc: 0.0781 Epoch 3/10 116/172 [===================>..........] - ETA: 43s - loss: 13.7937 - acc: 0.1374172/172 [==============================] - 145s 842ms/step - loss: 13.7134 - acc: 0.1415 - val_loss: 14.5223 - val_acc: 0.0781 Epoch 4/10 172/172 [==============================] - 149s 865ms/step - loss: 13.8021 - acc: 0.1374 - val_loss: 14.5154 - val_acc: 0.0781 Epoch 5/10 159/172 [==========================>...] - ETA: 10s - loss: 13.7849 - acc: 0.1388172/172 [==============================] - 149s 865ms/step - loss: 13.7815 - acc: 0.1386 - val_loss: 14.4916 - val_acc: 0.1016 Epoch 6/10 172/172 [==============================] - 145s 841ms/step - loss: 13.8563 - acc: 0.1341 - val_loss: 14.5080 - val_acc: 0.0781 Epoch 7/10 170/172 [============================>.] - ETA: 1s - loss: 13.7833 - acc: 0.1384172/172 [==============================] - 149s 863ms/step - loss: 13.7697 - acc: 0.1392 - val_loss: 14.5007 - val_acc: 0.0781 Epoch 8/10 172/172 [==============================] - 145s 845ms/step - loss: 13.7309 - acc: 0.1414 - val_loss: 14.5232 - val_acc: 0.0781 Epoch 9/10 171/172 [============================>.] - ETA: 0s - loss: 13.7978 - acc: 0.1374172/172 [==============================] - 150s 875ms/step - loss: 13.7966 - acc: 0.1375 - val_loss: 14.5228 - val_acc: 0.0781 Epoch 10/10 172/172 [==============================] - 148s 859ms/step - loss: 13.7908 - acc: 0.1377 - val_loss: 14.4889 - val_acc: 0.1016
<keras.callbacks.History at 0x7f5ef6a1f9e8>
import matplotlib.pyplot as plt
# Pull one batch for inspection. NOTE(review): `gen` here presumably refers to
# the custom OpenCV generator built earlier -- confirm against cell run order.
a,b = next(gen)
a.shape
(5, 150, 150, 3)
# Matplotlib display defaults: grayscale colormap, large figures, dark theme;
# then show the first image of the inspected batch.
plt.rcParams['image.cmap'] = 'gray'
plt.rcParams['figure.figsize'] = (16, 9)
plt.style.use('dark_background')
plt.imshow(a[0])
<matplotlib.image.AxesImage at 0x7f25d24e5748>
# Benchmark the Keras generator: wall-clock milliseconds per next() call.
times = []
for i in tqdm(range(1000)):
    start = datetime.datetime.now()
    # BUG FIX: the original referenced an undefined name `generator`; per the
    # cell's intent the Keras flow_from_directory generator is
    # `train_generator`.
    dat = next(train_generator)
    end = datetime.datetime.now()
    diff = end - start
    ms_elapsed = diff.total_seconds() * 1000
    times.append(ms_elapsed)
100%|██████████| 1000/1000 [01:53<00:00, 8.84it/s]
sns.distplot(times)
<matplotlib.axes._subplots.AxesSubplot at 0x7f533b918898>
# Summary statistics of the per-batch latencies (milliseconds).
mean_t = np.mean(times)
std_t = np.std(times)
max_t = np.max(times)
min_t = np.min(times)
print('Mean time: {:f} \n Max time: {:f} \n Min time: {:f} \n Standard deviation: {:f}'.format(mean_t,max_t,min_t,std_t))
Mean time: 112.494005 Max time: 538.344000 Min time: 51.504000 Standard deviation: 51.480851
# Benchmark the custom generator the same way: 1000 timed next() calls.
times = []
for i in tqdm(range(1000)):
    tic = datetime.datetime.now()
    dat = next(gen)
    toc = datetime.datetime.now()
    times.append((toc - tic).total_seconds() * 1000)
100%|██████████| 1000/1000 [01:13<00:00, 13.57it/s]
sns.distplot(times)
<matplotlib.axes._subplots.AxesSubplot at 0x7f5330dcbd68>
# Same latency summary for the custom generator's timings.
mean_t = np.mean(times)
std_t = np.std(times)
min_t = np.min(times)
max_t = np.max(times)
print('Mean time: {:f} \n Max time: {:f} \n Min time: {:f} \n Standard deviation: {:f}'.format(mean_t,max_t,min_t,std_t))
Mean time: 73.185093 Max time: 271.268000 Min time: 33.525000 Standard deviation: 35.667303
# Augmenting generator: rotations up to 90 degrees, 20% shifts and shear,
# 10% zoom, horizontal flips; pixels rescaled to [0, 1]; border pixels filled
# by nearest-neighbour replication.
train_datagen = ImageDataGenerator(
rescale = 1/255,
rotation_range=90,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.1,
horizontal_flip=True,
fill_mode='nearest')
!ls train/Charlock
from keras.preprocessing import image

# Visualise four random Keras augmentations of a single Charlock image.
fname = 'train/Charlock/270209308.png'
img = image.load_img(fname, target_size=(150, 150))
img = image.img_to_array(img)
img = np.expand_dims(img, axis=0)
import matplotlib.pyplot as plt
gen = train_datagen.flow(img, batch_size=1)
for i in range(4):
    plt.figure(i)
    batch = next(gen)
    imgplot = plt.imshow(image.array_to_img(batch[0]))
    plt.show()
# FIX: removed a stray `train_datagen.flow(img, batch_size=1)` statement whose
# result was discarded -- it only created an unused iterator.
batch.shape
(1, 150, 150, 3)
!pip install git+https://github.com/aleju/imgaug
Collecting git+https://github.com/aleju/imgaug Cloning https://github.com/aleju/imgaug to /tmp/pip-legoijks-build Requirement already satisfied: numpy>=1.7.0 in /usr/local/lib/python3.6/dist-packages (from imgaug==0.2.5) Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.6/dist-packages (from imgaug==0.2.5) Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from imgaug==0.2.5) Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from imgaug==0.2.5) Requirement already satisfied: pillow>=2.1.0 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: PyWavelets>=0.4.0 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: networkx>=1.8 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: matplotlib>=1.3.1 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: olefile in /usr/local/lib/python3.6/dist-packages (from pillow>=2.1.0->scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: decorator>=4.1.0 in /usr/local/lib/python3.6/dist-packages (from networkx>=1.8->scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib>=1.3.1->scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: pytz in /usr/local/lib/python3.6/dist-packages (from matplotlib>=1.3.1->scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib>=1.3.1->scikit-image>=0.11.0->imgaug==0.2.5) Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib>=1.3.1->scikit-image>=0.11.0->imgaug==0.2.5) Installing collected packages: 
imgaug Running setup.py install for imgaug ... - \ done Successfully installed imgaug-0.2.5
import imgaug as ia
from imgaug import augmenters as iaa
import numpy as np

ia.seed(1)

# Demo batch: 32 copies of the imgaug quokka image, shape (32, 64, 64, 3), uint8.
images = np.array([ia.quokka(size=(64, 64)) for _ in range(32)],
                  dtype=np.uint8)

# Augmentation pipeline; the augmenters run in random order per image.
seq = iaa.Sequential([
    iaa.Fliplr(0.5),             # mirror half of the images horizontally
    iaa.Crop(percent=(0, 0.1)),  # random crops of up to 10%
    # Blur roughly half the images with a small random-sigma gaussian kernel.
    iaa.Sometimes(0.5,
        iaa.GaussianBlur(sigma=(0, 0.5))
    ),
    # Strengthen or weaken the per-image contrast.
    iaa.ContrastNormalization((0.75, 1.5)),
    # Additive gaussian noise; for half the images it is sampled per pixel AND
    # per channel, which can shift colour as well as brightness.
    iaa.AdditiveGaussianNoise(loc=0, scale=(0.0, 0.05 * 255), per_channel=0.5),
    # Brightness jitter; sampled per channel in 20% of cases (colour shift).
    iaa.Multiply((0.8, 1.2), per_channel=0.2),
], random_order=True)

images_aug = seq.augment_images(images)
/usr/local/lib/python3.6/dist-packages/scipy/misc/pilutil.py:482: FutureWarning: Conversion of the second argument of issubdtype from `int` to `np.signedinteger` is deprecated. In future, it will be treated as `np.int64 == np.dtype(int).type`. if issubdtype(ts, int): /usr/local/lib/python3.6/dist-packages/scipy/misc/pilutil.py:485: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`. elif issubdtype(type(size), float):
# Run the imgaug pipeline on the single plant image batch and display it.
batch = seq.augment_images(img)
plt.imshow(batch[0])
<matplotlib.image.AxesImage at 0x7fe76cc56da0>
# Show four independent imgaug augmentations of the same source image.
for i in range(4):
    plt.figure(i)
    batch = seq.augment_images(img)
    imgplot = plt.imshow(image.array_to_img(batch[0]))
    plt.show()
def ocv_imgen_aug(root_dir, batch_size=32,
                  rescale=1/255,
                  target_size=(150, 150)):
    """Endlessly yield (images, one_hot_labels) batches from root_dir/<class>/<img>,
    running each full batch through the module-level imgaug pipeline `seq`
    before rescaling.

    rescale: multiplicative factor applied to the augmented batch; the default
    1/255 maps uint8 pixels into [0, 1].
    """
    paths = []
    targets = []
    for dir in os.listdir(root_dir):
        path = os.path.join(root_dir, dir)
        for file in os.listdir(path):
            paths.append(os.path.join(path, file))
            targets.append(dir)
    labelenc = LabelEncoder()
    int_targets = labelenc.fit_transform(targets).reshape(-1, 1)
    onehot_enc = OneHotEncoder(sparse=False)
    onehot_targets = onehot_enc.fit_transform(int_targets)
    indices = np.arange(len(paths))
    np.random.shuffle(indices)
    while True:
        image_stack = []
        target_stack = []
        for index in indices:
            img = cv2.imread(paths[index])
            # FIX: honour the target_size parameter (was hard-coded (150, 150)).
            img = cv2.resize(img, target_size)
            image_stack.append(img)
            target_stack.append(onehot_targets[index])
            if len(image_stack) == batch_size:
                images = np.stack(image_stack)
                images = seq.augment_images(images)
                # BUG FIX: the original did np.divide(images, rescale), i.e.
                # multiplied by 255. Multiply by the rescale factor so pixels
                # land in [0, 1] as intended.
                images = images * rescale
                yield images, np.stack(target_stack)
                image_stack = []
                target_stack = []
# Smoke-test the augmenting generator: pull one batch and display an image.
gen = ocv_imgen_aug('train')
a, b = next(gen)
plt.imshow(a[2])
<matplotlib.image.AxesImage at 0x7fe76d187c18>
import numpy as np
from keras.applications.vgg16 import VGG16
from keras import backend as K
# Dimensions of the generated pictures for each filter
# (224x224 is VGG16's native ImageNet input size).
img_width = 224
img_height = 224
# Layer whose filter would be visualized in the conv-layer variant below.
layer_name = 'block1_conv1'
# Filter index, used only by the (commented-out) conv-layer loss.
filter_index = 0
# ImageNet class index maximized by the final-layer loss.
# NOTE(review): presumably a specific ImageNet class — verify 184 against
# the class-index mapping before interpreting the visualization.
output_index = 184
# Build the full VGG16 network with ImageNet weights
# (downloads the weight file on first use).
model = VGG16(weights='imagenet')
print('Model loaded.')
model.summary()
Model loaded. _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_5 (InputLayer) (None, 224, 224, 3) 0 _________________________________________________________________ block1_conv1 (Conv2D) (None, 224, 224, 64) 1792 _________________________________________________________________ block1_conv2 (Conv2D) (None, 224, 224, 64) 36928 _________________________________________________________________ block1_pool (MaxPooling2D) (None, 112, 112, 64) 0 _________________________________________________________________ block2_conv1 (Conv2D) (None, 112, 112, 128) 73856 _________________________________________________________________ block2_conv2 (Conv2D) (None, 112, 112, 128) 147584 _________________________________________________________________ block2_pool (MaxPooling2D) (None, 56, 56, 128) 0 _________________________________________________________________ block3_conv1 (Conv2D) (None, 56, 56, 256) 295168 _________________________________________________________________ block3_conv2 (Conv2D) (None, 56, 56, 256) 590080 _________________________________________________________________ block3_conv3 (Conv2D) (None, 56, 56, 256) 590080 _________________________________________________________________ block3_pool (MaxPooling2D) (None, 28, 28, 256) 0 _________________________________________________________________ block4_conv1 (Conv2D) (None, 28, 28, 512) 1180160 _________________________________________________________________ block4_conv2 (Conv2D) (None, 28, 28, 512) 2359808 _________________________________________________________________ block4_conv3 (Conv2D) (None, 28, 28, 512) 2359808 _________________________________________________________________ block4_pool (MaxPooling2D) (None, 14, 14, 512) 0 _________________________________________________________________ block5_conv1 (Conv2D) (None, 14, 14, 512) 2359808 
_________________________________________________________________ block5_conv2 (Conv2D) (None, 14, 14, 512) 2359808 _________________________________________________________________ block5_conv3 (Conv2D) (None, 14, 14, 512) 2359808 _________________________________________________________________ block5_pool (MaxPooling2D) (None, 7, 7, 512) 0 _________________________________________________________________ flatten (Flatten) (None, 25088) 0 _________________________________________________________________ fc1 (Dense) (None, 4096) 102764544 _________________________________________________________________ fc2 (Dense) (None, 4096) 16781312 _________________________________________________________________ predictions (Dense) (None, 1000) 4097000 ================================================================= Total params: 138,357,544 Trainable params: 138,357,544 Non-trainable params: 0 _________________________________________________________________
# Map each layer's (unique) name to the layer object, skipping the input layer.
layer_dict = {layer.name: layer for layer in model.layers[1:]}
def normalize(x):
    """Scale tensor `x` by its RMS magnitude (epsilon-guarded against zero)."""
    magnitude = K.sqrt(K.mean(K.square(x))) + K.epsilon()
    return x / magnitude
print('Processing filter %d' % filter_index)
# Build a loss that, when maximized via gradient ascent on the input image,
# drives up the chosen activation.
layer_output = layer_dict[layer_name].output
# Active variant: maximize the (post-softmax) probability of class
# `output_index` at the network output.
# NOTE(review): the softmax output saturates near 1.0 so its gradient
# vanishes (see the flat loss trace below) — maximizing the pre-softmax
# logit is the usual fix; confirm before relying on this visualization.
loss = K.mean(model.output[:, output_index])
# Alternative: uncomment to maximize filter `filter_index` of `layer_name`.
#loss = K.mean(layer_output[:, :, :, filter_index])
# Symbolic placeholder for the input image batch.
input_img = model.input
# Gradient of the loss with respect to the input picture.
grads = K.gradients(loss, input_img)[0]
# Normalization trick: rescale the gradient to unit RMS for stable ascent.
grads = normalize(grads)
# Compiled function: input picture -> (loss, gradient).
iterate = K.function([input_img], [loss, grads])
Processing filter 0
# Start from uniform random noise in [0, 1); shape (1, H, W, 3) is one
# batch item matching VGG16's input layer.
input_img_data = np.random.rand(1,img_height,img_width,3)
# Step size for gradient ascent.
alpha = 0.01
# Run gradient ascent for up to 500 steps (the original comment said 20,
# which did not match the loop).
for i in range(500):
    loss_value, grads_value = iterate([input_img_data])
    # Ascend: move the input in the direction that increases the loss.
    input_img_data += grads_value * alpha
    print('Current loss value:', loss_value)
    if loss_value <= 0.:
        # Some filters get stuck at 0 activation; skip them early.
        print('warning zero loss')
        break
Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 
0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.9999945 Current loss value: 0.99999464 Current 
loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 
0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 
Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999464 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss 
value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.99999475 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.9999949 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss 
value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current 
loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995 Current loss value: 0.999995
def deprocess_image(x):
    """Convert a raw optimized image array into a displayable uint8 array.

    Centers the data on 0, normalizes it to std 0.1, shifts it to mean
    0.5, clips to [0, 1], and scales to the 0-255 byte range.

    Args:
        x: numeric array of any shape (any real dtype).

    Returns:
        A new ``uint8`` array of the same shape.

    BUG FIXES vs. original:
      * works on a float copy instead of mutating the caller's array in
        place (the original destructively rescaled ``input_img_data``);
      * accepts integer input (in-place float ops on int arrays raise).
    """
    # Float copy: never mutate the caller's data.
    x = np.array(x, dtype=np.float64)
    # Center on 0 and normalize to std 0.1.
    # 1e-07 is the Keras backend default epsilon (K.epsilon()), kept as a
    # literal so this function is pure NumPy.
    x -= x.mean()
    x /= (x.std() + 1e-07)
    x *= 0.1
    # Shift to mean 0.5 and clip into the displayable [0, 1] range.
    x += 0.5
    x = np.clip(x, 0, 1)
    # Scale to bytes.
    x *= 255
    return np.clip(x, 0, 255).astype('uint8')
# Convert the optimized input (first batch item) to a viewable image.
img = deprocess_image(input_img_data[0])
import matplotlib.pyplot as plt
plt.style.use(['dark_background'])
# Show the result on a 10x10 inch figure with the grid hidden.
fig = plt.figure(figsize=(10,10))
ax = fig.add_subplot(111)
plt.imshow(img)
ax.grid(False)
# NOTE(review): scipy.misc.imsave is deprecated and was removed in
# SciPy 1.2 — imageio.imwrite is the modern replacement.
from scipy.misc import imsave
imsave('test.png',img)
!ls
block5_conv3_filter_0.png datalab test.png block5_conv3_filter_1.png stitched_filters_8x8.png
# Download the saved visualization from the Colab VM to the local machine.
from google.colab import files
files.download('test.png')