# Standard-library imports (one per line, alphabetized per PEP 8; the
# original `import os,sys` combined two modules on one line).
import math
import os
import re
import sys
from pathlib import Path

# Record which Python interpreter this notebook ran under.
print(sys.version)
3.5.2 (default, Nov 23 2017, 16:37:01) [GCC 5.4.0 20160609]
# OPTIONAL: load the IPython "autoreload" extension so that edited source
# files are re-imported automatically while the kernel keeps running.
%load_ext autoreload
# OPTIONAL: mode 2 = always reload all modules, so changes under src/ take
# effect without restarting the kernel.
%autoreload 2
# To mark specific modules for manual reloading instead, prefix them with
# the %aimport magic below.
%aimport
# ref: https://ipython.org/ipython-doc/3/config/extensions/autoreload.html
import matplotlib.pyplot as plt
# Render matplotlib figures inline in the notebook.
%matplotlib inline
import numpy as np
import sklearn
from sklearn import datasets
import tqdm
import csv
import pandas as pd
# NOTE(review): seed is None, so NumPy seeds itself from OS entropy and runs
# are NOT reproducible; set an int here to pin the RNG state.
seed = None
np.random.seed(seed=seed)
# Log library versions so the article's results can be reproduced.
print("numpy ver: {}".format(np.__version__))
print("scikit-learn ver: {}".format(sklearn.__version__))
print("pandas ver: {}".format(pd.__version__))
Modules to reload: all-except-skipped Modules to skip: numpy ver: 1.15.1 scikit-learn ver: 0.19.2 pandas ver: 0.23.4
#____________________________________________________________________________________________________
# TensorFlow and Keras GPU configures
##________________________________________________________________________________
## OPTIONAL : set a GPU viewed by TensorFlow
###____________________________________________________________
### - https://stackoverflow.com/questions/37893755/tensorflow-set-cuda-visible-devices-within-jupyter
import os

# Enumerate GPUs by PCI bus id (see issue #152) so device indices are stable,
# then expose only physical GPU 1 to TensorFlow.
os.environ.update({
    "CUDA_DEVICE_ORDER": "PCI_BUS_ID",
    "CUDA_VISIBLE_DEVICES": "1",
})
##________________________________________________________________________________
##________________________________________________________________________________
## TensorFlow
###____________________________________________________________
import tensorflow as tf
print("tensorflow ver: {}".format(tf.__version__))
### eager mode — left disabled; the code below runs in graph mode (the
### printed value of executing_eagerly() confirms which mode is active).
#tf.enable_eager_execution()
print("tf.executing_eagerly(): {}".format(tf.executing_eagerly()))
# You can double check that you have the correct devices visible to TF
# - https://stackoverflow.com/questions/37893755/tensorflow-set-cuda-visible-devices-within-jupyter
from tensorflow.python.client import device_lib
print("""
________________________________________
Visible GPUs from TensorFlow
________________________________________""")
# Walk the devices TF can see and print "(NAME:index)" per device, plus the
# PCI bus id for GPUs. Parsing is done on the repr of each DeviceAttributes.
for _device in device_lib.list_local_devices():
    # FIX: the original pattern ended in (?P<device_num>\d{1})* — the '*'
    # repeated the whole group, so zero digits still matched (group -> None)
    # and a multi-digit index kept only its last digit. \d+ requires and
    # captures the full index.
    match = re.search(pattern=r'name: "/device:(?P<name>[A-Z]{3}):(?P<device_num>\d+)',
                      string=str(_device))
    if match is None:
        print("Not Match")
        continue
    if match.group("name") == "CPU":
        name, device_num = match.group("name", "device_num")
        print()
        print("({}:{})".format(name, device_num))
        continue
    name, device_num = match.group("name", "device_num")
    # FIX: escape the literal dot in "0000:02:00.0" — the original '.'
    # matched any character.
    match = re.search(pattern=r'.*pci bus id: (?P<pci_bus_id>\d{4}:\d{2}:\d{2}\.\d{1}).*',
                      string=str(_device))
    if match is None:
        print("No GPUs")
        continue
    print("({}:{}: pci_bus_id: {})".format(name, device_num, match.group("pci_bus_id")))
print("________________________________________")
###____________________________________________________________
### session — build one global TF session with soft placement and
### on-demand GPU memory growth. (Original comment typo: "sessioin".)
# NOTE(review): `global` at module scope is a no-op; it only matters inside
# a function body.
global _SESSION
# allow_soft_placement: fall back to CPU when an op has no GPU kernel;
# log_device_placement: log where every op is placed (verbose).
config = tf.ConfigProto(allow_soft_placement=True,
                        log_device_placement=True)
# Grab GPU memory incrementally instead of reserving it all up front.
config.gpu_options.allow_growth = True
_SESSION = tf.Session(config=config)
###____________________________________________________________
##________________________________________________________________________________
#____________________________________________________________________________________________________
# Keras: register a TF session with the Keras backend so Keras layers run
# under the same GPU-growth / soft-placement config.
import keras
import keras.backend as K
# NOTE(review): this duplicates the ConfigProto/Session built a few lines
# above; only this second session is actually handed to Keras.
config = tf.ConfigProto(allow_soft_placement=True,
                        log_device_placement=True)
config.gpu_options.allow_growth = True
_SESSION = tf.Session(config=config)
K.set_session(_SESSION)
tensorflow ver: 1.10.1 tf.executing_eagerly(): False ________________________________________ Visible GPUs from TensorFlow ________________________________________ (CPU:0) (GPU:0: pci_bus_id: 0000:02:00.0) ________________________________________
Using TensorFlow backend.
# Project root = parent of the directory the notebook runs in.
# Path.cwd() is the pathlib-native spelling of Path(os.getcwd()).
HOME = Path.cwd().parent
print(HOME)
/home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku
# Project directory layout: data/, data/raw/, data/plot_images/, src/.
# Create any directory that does not exist yet, echoing what was done.
path_list = []
data_Path = HOME / "data"
path_list.append(data_Path)
raw_Path = data_Path / "raw"
path_list.append(raw_Path)
plot_images_Path = data_Path / "plot_images"
path_list.append(plot_images_Path)
src_Path = HOME / "src"
path_list.append(src_Path)
for _Path in path_list:
    # pathlib replaces the original os.path.exists/os.makedirs round-trip
    # through str(); output strings are unchanged.
    if not _Path.exists():
        _Path.mkdir(parents=True)
        print("make a directory: \n\t", str(_Path))
    else:
        print(_Path.exists(), ": ", str(_Path))
True : /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data True : /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data/raw True : /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data/plot_images True : /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/src
# Make the project's src/ importable, then pull in the project-local
# semi-supervised MNIST helpers.
sys.path.append(str(src_Path))
from data import mnist_input_data
from utils_tensorflow.tensorflow_graph_in_jupyer import show_computational_graph
# Number of labeled training examples to keep; the rest of the 60k training
# set is treated as unlabeled by the reader.
num_labeled = 100
print( "=== Loading Data ===" )
# One-hot labels; fake_data=False downloads/extracts real MNIST under
# data/raw/MNIST_data (see the extraction log in the cell output).
mnist = mnist_input_data.read_data_sets(train_dir=str(raw_Path / "MNIST_data"), n_labeled=num_labeled,
                                        fake_data=False, one_hot=True)
=== Loading Data === Extracting /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data/raw/MNIST_data/train-images-idx3-ubyte.gz Extracting /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data/raw/MNIST_data/train-labels-idx1-ubyte.gz Extracting /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data/raw/MNIST_data/t10k-images-idx3-ubyte.gz Extracting /home/pollenjp/workdir/git/article_script/20180914__semi-supervised-deeplearning-ladder-networks__in_kabuku/data/raw/MNIST_data/t10k-labels-idx1-ubyte.gz
# Supervised baseline data: only the 100 labeled examples for training,
# the full test set for evaluation.
X_train = mnist.semi_train.labeled_ds.images
y_train = mnist.semi_train.labeled_ds.labels
X_test = mnist.semi_test.images
y_test = mnist.semi_test.labels
# Cell output shows (100, 784) (100, 10) (10000, 784) (10000, 10).
print(X_train.shape)
print(y_train.shape)
print(X_test.shape)
print(y_test.shape)
(100, 784) (100, 10) (10000, 784) (10000, 10)
# Notebook display cells: outside Jupyter these bare expressions are no-ops.
y_train[:10]
# Per-class label counts — the output shows a balanced 10-per-class set.
y_train.sum(axis=0)
array([10., 10., 10., 10., 10., 10., 10., 10., 10., 10.])
# ----- Hyper-parameters ----------------------------------------------------
layer_sizes = [784, 1000, 500, 250, 250, 250, 10]  # input, 5 hidden, output
L = len(layer_sizes) - 1  # number of weight layers
num_examples = 60000      # full MNIST training-set size
num_epochs = 300
starter_learning_rate = 0.02
decay_after = 15          # epoch after which to begin learning rate decay
batch_size = 100
# Total optimisation steps: batches per epoch times epochs.
num_iter = num_epochs * (num_examples // batch_size)
from keras.models import Sequential
from keras import layers
from keras import activations

# Drop any graph state left over from earlier cell executions.
K.clear_session()

layer_sizes = [784, 1000, 500, 250, 250, 250, 10]

# Fully-connected baseline classifier: Dense+ReLU followed by BatchNorm for
# every hidden width in layer_sizes[1:-1], then a softmax output layer.
# Built with a loop instead of six hand-written add() pairs — the resulting
# architecture is identical.
model = Sequential()
model.add(layer=layers.Dense(units=layer_sizes[1], activation="relu",
                             input_shape=(layer_sizes[0],)))
model.add(layer=layers.BatchNormalization())
for units in layer_sizes[2:-1]:
    model.add(layer=layers.Dense(units=units, activation="relu"))
    model.add(layer=layers.BatchNormalization())
model.add(layer=layers.Dense(units=layer_sizes[-1], activation="softmax"))
model.summary()
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_1 (Dense) (None, 1000) 785000 _________________________________________________________________ batch_normalization_1 (Batch (None, 1000) 4000 _________________________________________________________________ dense_2 (Dense) (None, 500) 500500 _________________________________________________________________ batch_normalization_2 (Batch (None, 500) 2000 _________________________________________________________________ dense_3 (Dense) (None, 250) 125250 _________________________________________________________________ batch_normalization_3 (Batch (None, 250) 1000 _________________________________________________________________ dense_4 (Dense) (None, 250) 62750 _________________________________________________________________ batch_normalization_4 (Batch (None, 250) 1000 _________________________________________________________________ dense_5 (Dense) (None, 250) 62750 _________________________________________________________________ batch_normalization_5 (Batch (None, 250) 1000 _________________________________________________________________ dense_6 (Dense) (None, 10) 2510 ================================================================= Total params: 1,547,760 Trainable params: 1,543,260 Non-trainable params: 4,500 _________________________________________________________________
from keras import optimizers

# Training hyper-parameters.
learning_rate = 0.02
metrics = ["accuracy"]

# Adam optimizer; `lr` is the Keras 2.x argument name (renamed to
# `learning_rate` in later releases).
opt = optimizers.Adam(lr=learning_rate)

# Categorical cross-entropy matches the softmax output layer and one-hot labels.
model.compile(optimizer=opt, loss="categorical_crossentropy", metrics=metrics)

# Fix: pass the validation arrays directly instead of `X_test[:]` / `y_test[:]`
# — the full slices created needless copies of the whole 10 000-sample test set.
model.fit(x=X_train, y=y_train, batch_size=batch_size, epochs=num_epochs,
          validation_data=(X_test, y_test))
Train on 100 samples, validate on 10000 samples Epoch 1/300 100/100 [==============================] - 1s 13ms/step - loss: 2.9616 - acc: 0.1100 - val_loss: 10.6892 - val_acc: 0.2676 Epoch 2/300 100/100 [==============================] - 0s 995us/step - loss: 1.0904 - acc: 0.6400 - val_loss: 8.3102 - val_acc: 0.3977 Epoch 3/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0202 - acc: 0.7500 - val_loss: 7.9438 - val_acc: 0.4422 Epoch 4/300 100/100 [==============================] - 0s 999us/step - loss: 0.1898 - acc: 0.9500 - val_loss: 8.8905 - val_acc: 0.3908 Epoch 5/300 100/100 [==============================] - 0s 1ms/step - loss: 0.1243 - acc: 0.9400 - val_loss: 9.1905 - val_acc: 0.3655 Epoch 6/300 100/100 [==============================] - 0s 999us/step - loss: 0.0336 - acc: 0.9900 - val_loss: 8.8903 - val_acc: 0.3698 Epoch 7/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0116 - acc: 1.0000 - val_loss: 8.3459 - val_acc: 0.3929 Epoch 8/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0099 - acc: 1.0000 - val_loss: 7.8748 - val_acc: 0.4149 Epoch 9/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0046 - acc: 1.0000 - val_loss: 7.4300 - val_acc: 0.4361 Epoch 10/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0033 - acc: 1.0000 - val_loss: 7.0338 - val_acc: 0.4545 Epoch 11/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0022 - acc: 1.0000 - val_loss: 6.6922 - val_acc: 0.4752 Epoch 12/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0015 - acc: 1.0000 - val_loss: 6.3802 - val_acc: 0.4938 Epoch 13/300 100/100 [==============================] - 0s 1ms/step - loss: 0.0012 - acc: 1.0000 - val_loss: 6.0966 - val_acc: 0.5096 Epoch 14/300 100/100 [==============================] - 0s 1ms/step - loss: 9.6306e-04 - acc: 1.0000 - val_loss: 5.8329 - val_acc: 0.5223 Epoch 15/300 100/100 [==============================] - 0s 1ms/step - 
loss: 8.3935e-04 - acc: 1.0000 - val_loss: 5.5857 - val_acc: 0.5343 Epoch 16/300 100/100 [==============================] - 0s 1ms/step - loss: 7.4601e-04 - acc: 1.0000 - val_loss: 5.3538 - val_acc: 0.5447 Epoch 17/300 100/100 [==============================] - 0s 1ms/step - loss: 6.7037e-04 - acc: 1.0000 - val_loss: 5.1367 - val_acc: 0.5563 Epoch 18/300 100/100 [==============================] - 0s 1ms/step - loss: 6.0773e-04 - acc: 1.0000 - val_loss: 4.9314 - val_acc: 0.5684 Epoch 19/300 100/100 [==============================] - 0s 1ms/step - loss: 5.4956e-04 - acc: 1.0000 - val_loss: 4.7381 - val_acc: 0.5793 Epoch 20/300 100/100 [==============================] - 0s 1ms/step - loss: 4.9384e-04 - acc: 1.0000 - val_loss: 4.5581 - val_acc: 0.5882 Epoch 21/300 100/100 [==============================] - 0s 994us/step - loss: 4.4163e-04 - acc: 1.0000 - val_loss: 4.3908 - val_acc: 0.5984 Epoch 22/300 100/100 [==============================] - 0s 1ms/step - loss: 3.9395e-04 - acc: 1.0000 - val_loss: 4.2370 - val_acc: 0.6052 Epoch 23/300 100/100 [==============================] - 0s 1ms/step - loss: 3.5147e-04 - acc: 1.0000 - val_loss: 4.0952 - val_acc: 0.6132 Epoch 24/300 100/100 [==============================] - 0s 999us/step - loss: 3.1479e-04 - acc: 1.0000 - val_loss: 3.9635 - val_acc: 0.6216 Epoch 25/300 100/100 [==============================] - 0s 1ms/step - loss: 2.8362e-04 - acc: 1.0000 - val_loss: 3.8419 - val_acc: 0.6295 Epoch 26/300 100/100 [==============================] - 0s 1ms/step - loss: 2.5723e-04 - acc: 1.0000 - val_loss: 3.7290 - val_acc: 0.6350 Epoch 27/300 100/100 [==============================] - 0s 1ms/step - loss: 2.3477e-04 - acc: 1.0000 - val_loss: 3.6254 - val_acc: 0.6403 Epoch 28/300 100/100 [==============================] - 0s 1ms/step - loss: 2.1544e-04 - acc: 1.0000 - val_loss: 3.5300 - val_acc: 0.6460 Epoch 29/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9868e-04 - acc: 1.0000 - val_loss: 3.4415 - val_acc: 
0.6512 Epoch 30/300 100/100 [==============================] - 0s 996us/step - loss: 1.8404e-04 - acc: 1.0000 - val_loss: 3.3603 - val_acc: 0.6561 Epoch 31/300 100/100 [==============================] - 0s 1ms/step - loss: 1.7111e-04 - acc: 1.0000 - val_loss: 3.2852 - val_acc: 0.6597 Epoch 32/300 100/100 [==============================] - 0s 999us/step - loss: 1.5961e-04 - acc: 1.0000 - val_loss: 3.2157 - val_acc: 0.6640 Epoch 33/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4936e-04 - acc: 1.0000 - val_loss: 3.1505 - val_acc: 0.6675 Epoch 34/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4010e-04 - acc: 1.0000 - val_loss: 3.0895 - val_acc: 0.6719 Epoch 35/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3173e-04 - acc: 1.0000 - val_loss: 3.0327 - val_acc: 0.6756 Epoch 36/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2415e-04 - acc: 1.0000 - val_loss: 2.9796 - val_acc: 0.6790 Epoch 37/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1727e-04 - acc: 1.0000 - val_loss: 2.9295 - val_acc: 0.6813 Epoch 38/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1103e-04 - acc: 1.0000 - val_loss: 2.8822 - val_acc: 0.6838 Epoch 39/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0532e-04 - acc: 1.0000 - val_loss: 2.8377 - val_acc: 0.6863 Epoch 40/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0009e-04 - acc: 1.0000 - val_loss: 2.7961 - val_acc: 0.6883 Epoch 41/300 100/100 [==============================] - 0s 996us/step - loss: 9.5319e-05 - acc: 1.0000 - val_loss: 2.7574 - val_acc: 0.6901 Epoch 42/300 100/100 [==============================] - 0s 989us/step - loss: 9.0924e-05 - acc: 1.0000 - val_loss: 2.7209 - val_acc: 0.6912 Epoch 43/300 100/100 [==============================] - 0s 1ms/step - loss: 8.6882e-05 - acc: 1.0000 - val_loss: 2.6864 - val_acc: 0.6923 Epoch 44/300 100/100 
[==============================] - 0s 995us/step - loss: 8.3176e-05 - acc: 1.0000 - val_loss: 2.6536 - val_acc: 0.6938 Epoch 45/300 100/100 [==============================] - 0s 1ms/step - loss: 7.9764e-05 - acc: 1.0000 - val_loss: 2.6224 - val_acc: 0.6953 Epoch 46/300 100/100 [==============================] - 0s 1ms/step - loss: 7.6622e-05 - acc: 1.0000 - val_loss: 2.5928 - val_acc: 0.6960 Epoch 47/300 100/100 [==============================] - 0s 999us/step - loss: 7.3730e-05 - acc: 1.0000 - val_loss: 2.5648 - val_acc: 0.6973 Epoch 48/300 100/100 [==============================] - 0s 1ms/step - loss: 7.1056e-05 - acc: 1.0000 - val_loss: 2.5383 - val_acc: 0.6997 Epoch 49/300 100/100 [==============================] - 0s 991us/step - loss: 6.8571e-05 - acc: 1.0000 - val_loss: 2.5130 - val_acc: 0.7000 Epoch 50/300 100/100 [==============================] - 0s 1ms/step - loss: 6.6281e-05 - acc: 1.0000 - val_loss: 2.4888 - val_acc: 0.7001 Epoch 51/300 100/100 [==============================] - 0s 999us/step - loss: 6.4148e-05 - acc: 1.0000 - val_loss: 2.4659 - val_acc: 0.7007 Epoch 52/300 100/100 [==============================] - 0s 1ms/step - loss: 6.2165e-05 - acc: 1.0000 - val_loss: 2.4439 - val_acc: 0.7026 Epoch 53/300 100/100 [==============================] - 0s 993us/step - loss: 6.0307e-05 - acc: 1.0000 - val_loss: 2.4229 - val_acc: 0.7035 Epoch 54/300 100/100 [==============================] - 0s 1ms/step - loss: 5.8590e-05 - acc: 1.0000 - val_loss: 2.4028 - val_acc: 0.7040 Epoch 55/300 100/100 [==============================] - 0s 1ms/step - loss: 5.6975e-05 - acc: 1.0000 - val_loss: 2.3835 - val_acc: 0.7045 Epoch 56/300 100/100 [==============================] - 0s 1ms/step - loss: 5.5458e-05 - acc: 1.0000 - val_loss: 2.3652 - val_acc: 0.7051 Epoch 57/300 100/100 [==============================] - 0s 999us/step - loss: 5.4039e-05 - acc: 1.0000 - val_loss: 2.3476 - val_acc: 0.7058 Epoch 58/300 100/100 [==============================] - 0s 1ms/step - loss: 
5.2710e-05 - acc: 1.0000 - val_loss: 2.3308 - val_acc: 0.7059 Epoch 59/300 100/100 [==============================] - 0s 1ms/step - loss: 5.1451e-05 - acc: 1.0000 - val_loss: 2.3147 - val_acc: 0.7066 Epoch 60/300 100/100 [==============================] - 0s 1ms/step - loss: 5.0271e-05 - acc: 1.0000 - val_loss: 2.2994 - val_acc: 0.7063 Epoch 61/300 100/100 [==============================] - 0s 1ms/step - loss: 4.9149e-05 - acc: 1.0000 - val_loss: 2.2847 - val_acc: 0.7072 Epoch 62/300 100/100 [==============================] - 0s 999us/step - loss: 4.8092e-05 - acc: 1.0000 - val_loss: 2.2706 - val_acc: 0.7078 Epoch 63/300 100/100 [==============================] - 0s 1ms/step - loss: 4.7094e-05 - acc: 1.0000 - val_loss: 2.2571 - val_acc: 0.7087 Epoch 64/300 100/100 [==============================] - 0s 1ms/step - loss: 4.6142e-05 - acc: 1.0000 - val_loss: 2.2441 - val_acc: 0.7088 Epoch 65/300 100/100 [==============================] - 0s 1ms/step - loss: 4.5239e-05 - acc: 1.0000 - val_loss: 2.2317 - val_acc: 0.7086 Epoch 66/300 100/100 [==============================] - 0s 999us/step - loss: 4.4372e-05 - acc: 1.0000 - val_loss: 2.2198 - val_acc: 0.7090 Epoch 67/300 100/100 [==============================] - 0s 999us/step - loss: 4.3565e-05 - acc: 1.0000 - val_loss: 2.2082 - val_acc: 0.7085 Epoch 68/300 100/100 [==============================] - 0s 990us/step - loss: 4.2783e-05 - acc: 1.0000 - val_loss: 2.1971 - val_acc: 0.7089 Epoch 69/300 100/100 [==============================] - 0s 1ms/step - loss: 4.2028e-05 - acc: 1.0000 - val_loss: 2.1864 - val_acc: 0.7091 Epoch 70/300 100/100 [==============================] - 0s 1ms/step - loss: 4.1319e-05 - acc: 1.0000 - val_loss: 2.1760 - val_acc: 0.7089 Epoch 71/300 100/100 [==============================] - 0s 999us/step - loss: 4.0632e-05 - acc: 1.0000 - val_loss: 2.1661 - val_acc: 0.7089 Epoch 72/300 100/100 [==============================] - 0s 1ms/step - loss: 3.9978e-05 - acc: 1.0000 - val_loss: 2.1564 - val_acc: 
0.7085 Epoch 73/300 100/100 [==============================] - 0s 1ms/step - loss: 3.9351e-05 - acc: 1.0000 - val_loss: 2.1472 - val_acc: 0.7080 Epoch 74/300 100/100 [==============================] - 0s 1ms/step - loss: 3.8735e-05 - acc: 1.0000 - val_loss: 2.1382 - val_acc: 0.7080 Epoch 75/300 100/100 [==============================] - 0s 1ms/step - loss: 3.8152e-05 - acc: 1.0000 - val_loss: 2.1295 - val_acc: 0.7082 Epoch 76/300 100/100 [==============================] - 0s 1ms/step - loss: 3.7593e-05 - acc: 1.0000 - val_loss: 2.1212 - val_acc: 0.7082 Epoch 77/300 100/100 [==============================] - 0s 1ms/step - loss: 3.7045e-05 - acc: 1.0000 - val_loss: 2.1132 - val_acc: 0.7084 Epoch 78/300 100/100 [==============================] - 0s 1ms/step - loss: 3.6517e-05 - acc: 1.0000 - val_loss: 2.1054 - val_acc: 0.7087 Epoch 79/300 100/100 [==============================] - 0s 1ms/step - loss: 3.6007e-05 - acc: 1.0000 - val_loss: 2.0979 - val_acc: 0.7090 Epoch 80/300 100/100 [==============================] - 0s 1ms/step - loss: 3.5521e-05 - acc: 1.0000 - val_loss: 2.0907 - val_acc: 0.7090 Epoch 81/300 100/100 [==============================] - 0s 1ms/step - loss: 3.5047e-05 - acc: 1.0000 - val_loss: 2.0837 - val_acc: 0.7088 Epoch 82/300 100/100 [==============================] - 0s 1ms/step - loss: 3.4583e-05 - acc: 1.0000 - val_loss: 2.0769 - val_acc: 0.7093 Epoch 83/300 100/100 [==============================] - 0s 1ms/step - loss: 3.4134e-05 - acc: 1.0000 - val_loss: 2.0704 - val_acc: 0.7090 Epoch 84/300 100/100 [==============================] - 0s 1ms/step - loss: 3.3701e-05 - acc: 1.0000 - val_loss: 2.0640 - val_acc: 0.7089 Epoch 85/300 100/100 [==============================] - 0s 1ms/step - loss: 3.3281e-05 - acc: 1.0000 - val_loss: 2.0579 - val_acc: 0.7094 Epoch 86/300 100/100 [==============================] - 0s 1ms/step - loss: 3.2867e-05 - acc: 1.0000 - val_loss: 2.0520 - val_acc: 0.7092 Epoch 87/300 100/100 [==============================] - 0s 
1ms/step - loss: 3.2464e-05 - acc: 1.0000 - val_loss: 2.0463 - val_acc: 0.7088 Epoch 88/300 100/100 [==============================] - 0s 1ms/step - loss: 3.2082e-05 - acc: 1.0000 - val_loss: 2.0408 - val_acc: 0.7084 Epoch 89/300 100/100 [==============================] - 0s 1ms/step - loss: 3.1700e-05 - acc: 1.0000 - val_loss: 2.0355 - val_acc: 0.7086 Epoch 90/300 100/100 [==============================] - 0s 1ms/step - loss: 3.1338e-05 - acc: 1.0000 - val_loss: 2.0303 - val_acc: 0.7083 Epoch 91/300 100/100 [==============================] - 0s 1ms/step - loss: 3.0975e-05 - acc: 1.0000 - val_loss: 2.0253 - val_acc: 0.7082 Epoch 92/300 100/100 [==============================] - 0s 1ms/step - loss: 3.0625e-05 - acc: 1.0000 - val_loss: 2.0205 - val_acc: 0.7085 Epoch 93/300 100/100 [==============================] - 0s 1ms/step - loss: 3.0282e-05 - acc: 1.0000 - val_loss: 2.0159 - val_acc: 0.7086 Epoch 94/300 100/100 [==============================] - 0s 1ms/step - loss: 2.9951e-05 - acc: 1.0000 - val_loss: 2.0114 - val_acc: 0.7086 Epoch 95/300 100/100 [==============================] - 0s 1ms/step - loss: 2.9625e-05 - acc: 1.0000 - val_loss: 2.0070 - val_acc: 0.7083 Epoch 96/300 100/100 [==============================] - 0s 1ms/step - loss: 2.9304e-05 - acc: 1.0000 - val_loss: 2.0028 - val_acc: 0.7085 Epoch 97/300 100/100 [==============================] - 0s 1ms/step - loss: 2.8990e-05 - acc: 1.0000 - val_loss: 1.9987 - val_acc: 0.7086 Epoch 98/300 100/100 [==============================] - 0s 1ms/step - loss: 2.8686e-05 - acc: 1.0000 - val_loss: 1.9947 - val_acc: 0.7081 Epoch 99/300 100/100 [==============================] - 0s 1ms/step - loss: 2.8392e-05 - acc: 1.0000 - val_loss: 1.9908 - val_acc: 0.7082 Epoch 100/300 100/100 [==============================] - 0s 1ms/step - loss: 2.8100e-05 - acc: 1.0000 - val_loss: 1.9871 - val_acc: 0.7077 Epoch 101/300 100/100 [==============================] - 0s 1ms/step - loss: 2.7812e-05 - acc: 1.0000 - val_loss: 1.9835 - 
val_acc: 0.7077 Epoch 102/300 100/100 [==============================] - 0s 1ms/step - loss: 2.7535e-05 - acc: 1.0000 - val_loss: 1.9800 - val_acc: 0.7064 Epoch 103/300 100/100 [==============================] - 0s 1ms/step - loss: 2.7259e-05 - acc: 1.0000 - val_loss: 1.9765 - val_acc: 0.7064 Epoch 104/300 100/100 [==============================] - 0s 1ms/step - loss: 2.6987e-05 - acc: 1.0000 - val_loss: 1.9732 - val_acc: 0.7064 Epoch 105/300 100/100 [==============================] - 0s 1ms/step - loss: 2.6726e-05 - acc: 1.0000 - val_loss: 1.9700 - val_acc: 0.7063 Epoch 106/300 100/100 [==============================] - 0s 1ms/step - loss: 2.6468e-05 - acc: 1.0000 - val_loss: 1.9669 - val_acc: 0.7061 Epoch 107/300 100/100 [==============================] - 0s 1ms/step - loss: 2.6205e-05 - acc: 1.0000 - val_loss: 1.9638 - val_acc: 0.7062 Epoch 108/300 100/100 [==============================] - 0s 1ms/step - loss: 2.5962e-05 - acc: 1.0000 - val_loss: 1.9609 - val_acc: 0.7060 Epoch 109/300 100/100 [==============================] - 0s 1ms/step - loss: 2.5717e-05 - acc: 1.0000 - val_loss: 1.9580 - val_acc: 0.7062 Epoch 110/300 100/100 [==============================] - 0s 997us/step - loss: 2.5474e-05 - acc: 1.0000 - val_loss: 1.9552 - val_acc: 0.7061 Epoch 111/300 100/100 [==============================] - 0s 1ms/step - loss: 2.5243e-05 - acc: 1.0000 - val_loss: 1.9524 - val_acc: 0.7061 Epoch 112/300 100/100 [==============================] - 0s 1ms/step - loss: 2.5007e-05 - acc: 1.0000 - val_loss: 1.9498 - val_acc: 0.7064 Epoch 113/300 100/100 [==============================] - 0s 1ms/step - loss: 2.4778e-05 - acc: 1.0000 - val_loss: 1.9472 - val_acc: 0.7062 Epoch 114/300 100/100 [==============================] - 0s 1ms/step - loss: 2.4552e-05 - acc: 1.0000 - val_loss: 1.9447 - val_acc: 0.7062 Epoch 115/300 100/100 [==============================] - 0s 1ms/step - loss: 2.4331e-05 - acc: 1.0000 - val_loss: 1.9422 - val_acc: 0.7063 Epoch 116/300 100/100 
[==============================] - 0s 1ms/step - loss: 2.4118e-05 - acc: 1.0000 - val_loss: 1.9398 - val_acc: 0.7061 Epoch 117/300 100/100 [==============================] - 0s 1ms/step - loss: 2.3904e-05 - acc: 1.0000 - val_loss: 1.9375 - val_acc: 0.7060 Epoch 118/300 100/100 [==============================] - 0s 1ms/step - loss: 2.3696e-05 - acc: 1.0000 - val_loss: 1.9352 - val_acc: 0.7059 Epoch 119/300 100/100 [==============================] - 0s 1ms/step - loss: 2.3493e-05 - acc: 1.0000 - val_loss: 1.9330 - val_acc: 0.7062 Epoch 120/300 100/100 [==============================] - 0s 996us/step - loss: 2.3289e-05 - acc: 1.0000 - val_loss: 1.9308 - val_acc: 0.7062 Epoch 121/300 100/100 [==============================] - 0s 1ms/step - loss: 2.3092e-05 - acc: 1.0000 - val_loss: 1.9287 - val_acc: 0.7062 Epoch 122/300 100/100 [==============================] - 0s 1ms/step - loss: 2.2892e-05 - acc: 1.0000 - val_loss: 1.9266 - val_acc: 0.7060 Epoch 123/300 100/100 [==============================] - 0s 1ms/step - loss: 2.2699e-05 - acc: 1.0000 - val_loss: 1.9246 - val_acc: 0.7061 Epoch 124/300 100/100 [==============================] - 0s 1ms/step - loss: 2.2508e-05 - acc: 1.0000 - val_loss: 1.9226 - val_acc: 0.7062 Epoch 125/300 100/100 [==============================] - 0s 999us/step - loss: 2.2323e-05 - acc: 1.0000 - val_loss: 1.9206 - val_acc: 0.7057 Epoch 126/300 100/100 [==============================] - 0s 1000us/step - loss: 2.2140e-05 - acc: 1.0000 - val_loss: 1.9187 - val_acc: 0.7058 Epoch 127/300 100/100 [==============================] - 0s 1ms/step - loss: 2.1950e-05 - acc: 1.0000 - val_loss: 1.9169 - val_acc: 0.7056 Epoch 128/300 100/100 [==============================] - 0s 1000us/step - loss: 2.1772e-05 - acc: 1.0000 - val_loss: 1.9151 - val_acc: 0.7058 Epoch 129/300 100/100 [==============================] - 0s 1ms/step - loss: 2.1594e-05 - acc: 1.0000 - val_loss: 1.9133 - val_acc: 0.7059 Epoch 130/300 100/100 [==============================] - 0s 
1000us/step - loss: 2.1422e-05 - acc: 1.0000 - val_loss: 1.9116 - val_acc: 0.7056 Epoch 131/300 100/100 [==============================] - 0s 1ms/step - loss: 2.1256e-05 - acc: 1.0000 - val_loss: 1.9099 - val_acc: 0.7055 Epoch 132/300 100/100 [==============================] - 0s 1ms/step - loss: 2.1087e-05 - acc: 1.0000 - val_loss: 1.9082 - val_acc: 0.7059 Epoch 133/300 100/100 [==============================] - 0s 1ms/step - loss: 2.0919e-05 - acc: 1.0000 - val_loss: 1.9066 - val_acc: 0.7058 Epoch 134/300 100/100 [==============================] - 0s 1ms/step - loss: 2.0761e-05 - acc: 1.0000 - val_loss: 1.9050 - val_acc: 0.7057 Epoch 135/300 100/100 [==============================] - 0s 996us/step - loss: 2.0594e-05 - acc: 1.0000 - val_loss: 1.9035 - val_acc: 0.7055 Epoch 136/300 100/100 [==============================] - 0s 1ms/step - loss: 2.0434e-05 - acc: 1.0000 - val_loss: 1.9020 - val_acc: 0.7055 Epoch 137/300 100/100 [==============================] - 0s 1ms/step - loss: 2.0280e-05 - acc: 1.0000 - val_loss: 1.9005 - val_acc: 0.7054 Epoch 138/300 100/100 [==============================] - 0s 1ms/step - loss: 2.0122e-05 - acc: 1.0000 - val_loss: 1.8991 - val_acc: 0.7054 Epoch 139/300 100/100 [==============================] - 0s 995us/step - loss: 1.9969e-05 - acc: 1.0000 - val_loss: 1.8976 - val_acc: 0.7054 Epoch 140/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9817e-05 - acc: 1.0000 - val_loss: 1.8962 - val_acc: 0.7053 Epoch 141/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9669e-05 - acc: 1.0000 - val_loss: 1.8949 - val_acc: 0.7050 Epoch 142/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9523e-05 - acc: 1.0000 - val_loss: 1.8935 - val_acc: 0.7047 Epoch 143/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9381e-05 - acc: 1.0000 - val_loss: 1.8922 - val_acc: 0.7044 Epoch 144/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9235e-05 - acc: 1.0000 - 
val_loss: 1.8909 - val_acc: 0.7042 Epoch 145/300 100/100 [==============================] - 0s 1ms/step - loss: 1.9092e-05 - acc: 1.0000 - val_loss: 1.8897 - val_acc: 0.7039 Epoch 146/300 100/100 [==============================] - 0s 1ms/step - loss: 1.8949e-05 - acc: 1.0000 - val_loss: 1.8884 - val_acc: 0.7039 Epoch 147/300 100/100 [==============================] - 0s 1ms/step - loss: 1.8812e-05 - acc: 1.0000 - val_loss: 1.8872 - val_acc: 0.7040 Epoch 148/300 100/100 [==============================] - 0s 1ms/step - loss: 1.8676e-05 - acc: 1.0000 - val_loss: 1.8860 - val_acc: 0.7039 Epoch 149/300 100/100 [==============================] - 0s 992us/step - loss: 1.8540e-05 - acc: 1.0000 - val_loss: 1.8849 - val_acc: 0.7033 Epoch 150/300 100/100 [==============================] - 0s 1ms/step - loss: 1.8412e-05 - acc: 1.0000 - val_loss: 1.8837 - val_acc: 0.7034 Epoch 151/300 100/100 [==============================] - 0s 1ms/step - loss: 1.8281e-05 - acc: 1.0000 - val_loss: 1.8826 - val_acc: 0.7033 Epoch 152/300 100/100 [==============================] - 0s 1ms/step - loss: 1.8152e-05 - acc: 1.0000 - val_loss: 1.8815 - val_acc: 0.7036 Epoch 153/300 100/100 [==============================] - 0s 999us/step - loss: 1.8019e-05 - acc: 1.0000 - val_loss: 1.8805 - val_acc: 0.7036 Epoch 154/300 100/100 [==============================] - 0s 1ms/step - loss: 1.7894e-05 - acc: 1.0000 - val_loss: 1.8794 - val_acc: 0.7037 Epoch 155/300 100/100 [==============================] - 0s 999us/step - loss: 1.7773e-05 - acc: 1.0000 - val_loss: 1.8784 - val_acc: 0.7038 Epoch 156/300 100/100 [==============================] - 0s 1ms/step - loss: 1.7649e-05 - acc: 1.0000 - val_loss: 1.8774 - val_acc: 0.7038 Epoch 157/300 100/100 [==============================] - 0s 1ms/step - loss: 1.7524e-05 - acc: 1.0000 - val_loss: 1.8764 - val_acc: 0.7036 Epoch 158/300 100/100 [==============================] - 0s 997us/step - loss: 1.7399e-05 - acc: 1.0000 - val_loss: 1.8754 - val_acc: 0.7035 Epoch 
159/300 100/100 [==============================] - 0s 1ms/step - loss: 1.7285e-05 - acc: 1.0000 - val_loss: 1.8745 - val_acc: 0.7033 Epoch 160/300 100/100 [==============================] - 0s 993us/step - loss: 1.7166e-05 - acc: 1.0000 - val_loss: 1.8735 - val_acc: 0.7029 Epoch 161/300 100/100 [==============================] - 0s 1ms/step - loss: 1.7051e-05 - acc: 1.0000 - val_loss: 1.8726 - val_acc: 0.7030 Epoch 162/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6933e-05 - acc: 1.0000 - val_loss: 1.8717 - val_acc: 0.7032 Epoch 163/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6818e-05 - acc: 1.0000 - val_loss: 1.8708 - val_acc: 0.7032 Epoch 164/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6705e-05 - acc: 1.0000 - val_loss: 1.8699 - val_acc: 0.7032 Epoch 165/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6603e-05 - acc: 1.0000 - val_loss: 1.8691 - val_acc: 0.7031 Epoch 166/300 100/100 [==============================] - 0s 998us/step - loss: 1.6491e-05 - acc: 1.0000 - val_loss: 1.8682 - val_acc: 0.7029 Epoch 167/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6378e-05 - acc: 1.0000 - val_loss: 1.8674 - val_acc: 0.7027 Epoch 168/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6274e-05 - acc: 1.0000 - val_loss: 1.8666 - val_acc: 0.7028 Epoch 169/300 100/100 [==============================] - 0s 1ms/step - loss: 1.6175e-05 - acc: 1.0000 - val_loss: 1.8658 - val_acc: 0.7029 Epoch 170/300 100/100 [==============================] - 0s 995us/step - loss: 1.6061e-05 - acc: 1.0000 - val_loss: 1.8650 - val_acc: 0.7030 Epoch 171/300 100/100 [==============================] - 0s 994us/step - loss: 1.5953e-05 - acc: 1.0000 - val_loss: 1.8642 - val_acc: 0.7027 Epoch 172/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5856e-05 - acc: 1.0000 - val_loss: 1.8635 - val_acc: 0.7026 Epoch 173/300 100/100 
[==============================] - 0s 1ms/step - loss: 1.5757e-05 - acc: 1.0000 - val_loss: 1.8627 - val_acc: 0.7024 Epoch 174/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5654e-05 - acc: 1.0000 - val_loss: 1.8620 - val_acc: 0.7023 Epoch 175/300 100/100 [==============================] - 0s 994us/step - loss: 1.5552e-05 - acc: 1.0000 - val_loss: 1.8613 - val_acc: 0.7022 Epoch 176/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5460e-05 - acc: 1.0000 - val_loss: 1.8606 - val_acc: 0.7020 Epoch 177/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5362e-05 - acc: 1.0000 - val_loss: 1.8599 - val_acc: 0.7020 Epoch 178/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5262e-05 - acc: 1.0000 - val_loss: 1.8592 - val_acc: 0.7021 Epoch 179/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5164e-05 - acc: 1.0000 - val_loss: 1.8586 - val_acc: 0.7021 Epoch 180/300 100/100 [==============================] - 0s 1ms/step - loss: 1.5075e-05 - acc: 1.0000 - val_loss: 1.8579 - val_acc: 0.7019 Epoch 181/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4979e-05 - acc: 1.0000 - val_loss: 1.8573 - val_acc: 0.7018 Epoch 182/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4892e-05 - acc: 1.0000 - val_loss: 1.8567 - val_acc: 0.7018 Epoch 183/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4794e-05 - acc: 1.0000 - val_loss: 1.8561 - val_acc: 0.7018 Epoch 184/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4710e-05 - acc: 1.0000 - val_loss: 1.8554 - val_acc: 0.7016 Epoch 185/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4615e-05 - acc: 1.0000 - val_loss: 1.8549 - val_acc: 0.7016 Epoch 186/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4532e-05 - acc: 1.0000 - val_loss: 1.8543 - val_acc: 0.7017 Epoch 187/300 100/100 [==============================] - 0s 1ms/step - 
loss: 1.4446e-05 - acc: 1.0000 - val_loss: 1.8537 - val_acc: 0.7016 Epoch 188/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4354e-05 - acc: 1.0000 - val_loss: 1.8531 - val_acc: 0.7015 Epoch 189/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4271e-05 - acc: 1.0000 - val_loss: 1.8526 - val_acc: 0.7017 Epoch 190/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4190e-05 - acc: 1.0000 - val_loss: 1.8520 - val_acc: 0.7018 Epoch 191/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4106e-05 - acc: 1.0000 - val_loss: 1.8515 - val_acc: 0.7019 Epoch 192/300 100/100 [==============================] - 0s 1ms/step - loss: 1.4025e-05 - acc: 1.0000 - val_loss: 1.8510 - val_acc: 0.7016 Epoch 193/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3939e-05 - acc: 1.0000 - val_loss: 1.8505 - val_acc: 0.7014 Epoch 194/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3862e-05 - acc: 1.0000 - val_loss: 1.8499 - val_acc: 0.7013 Epoch 195/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3777e-05 - acc: 1.0000 - val_loss: 1.8494 - val_acc: 0.7013 Epoch 196/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3694e-05 - acc: 1.0000 - val_loss: 1.8490 - val_acc: 0.7012 Epoch 197/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3615e-05 - acc: 1.0000 - val_loss: 1.8485 - val_acc: 0.7011 Epoch 198/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3542e-05 - acc: 1.0000 - val_loss: 1.8480 - val_acc: 0.7011 Epoch 199/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3468e-05 - acc: 1.0000 - val_loss: 1.8475 - val_acc: 0.7010 Epoch 200/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3386e-05 - acc: 1.0000 - val_loss: 1.8471 - val_acc: 0.7008 Epoch 201/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3318e-05 - acc: 1.0000 - val_loss: 1.8466 - 
val_acc: 0.7008 Epoch 202/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3241e-05 - acc: 1.0000 - val_loss: 1.8462 - val_acc: 0.7008 Epoch 203/300 100/100 [==============================] - 0s 1ms/step - loss: 1.3163e-05 - acc: 1.0000 - val_loss: 1.8457 - val_acc: 0.7007 Epoch 204/300 100/100 [==============================] - 0s 991us/step - loss: 1.3094e-05 - acc: 1.0000 - val_loss: 1.8453 - val_acc: 0.7008 Epoch 205/300 100/100 [==============================] - 0s 999us/step - loss: 1.3018e-05 - acc: 1.0000 - val_loss: 1.8449 - val_acc: 0.7008 Epoch 206/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2946e-05 - acc: 1.0000 - val_loss: 1.8444 - val_acc: 0.7008 Epoch 207/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2875e-05 - acc: 1.0000 - val_loss: 1.8440 - val_acc: 0.7006 Epoch 208/300 100/100 [==============================] - 0s 995us/step - loss: 1.2809e-05 - acc: 1.0000 - val_loss: 1.8436 - val_acc: 0.7006 Epoch 209/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2736e-05 - acc: 1.0000 - val_loss: 1.8432 - val_acc: 0.7006 Epoch 210/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2664e-05 - acc: 1.0000 - val_loss: 1.8428 - val_acc: 0.7006 Epoch 211/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2596e-05 - acc: 1.0000 - val_loss: 1.8424 - val_acc: 0.7006 Epoch 212/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2521e-05 - acc: 1.0000 - val_loss: 1.8421 - val_acc: 0.7005 Epoch 213/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2456e-05 - acc: 1.0000 - val_loss: 1.8417 - val_acc: 0.7007 Epoch 214/300 100/100 [==============================] - 0s 990us/step - loss: 1.2394e-05 - acc: 1.0000 - val_loss: 1.8413 - val_acc: 0.7007 Epoch 215/300 100/100 [==============================] - 0s 992us/step - loss: 1.2319e-05 - acc: 1.0000 - val_loss: 1.8410 - val_acc: 0.7007 Epoch 216/300 100/100 
[==============================] - 0s 1ms/step - loss: 1.2254e-05 - acc: 1.0000 - val_loss: 1.8406 - val_acc: 0.7007 Epoch 217/300 100/100 [==============================] - 0s 988us/step - loss: 1.2194e-05 - acc: 1.0000 - val_loss: 1.8403 - val_acc: 0.7007 Epoch 218/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2129e-05 - acc: 1.0000 - val_loss: 1.8399 - val_acc: 0.7006 Epoch 219/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2067e-05 - acc: 1.0000 - val_loss: 1.8396 - val_acc: 0.7004 Epoch 220/300 100/100 [==============================] - 0s 1ms/step - loss: 1.2000e-05 - acc: 1.0000 - val_loss: 1.8393 - val_acc: 0.7005 Epoch 221/300 100/100 [==============================] - 0s 996us/step - loss: 1.1938e-05 - acc: 1.0000 - val_loss: 1.8389 - val_acc: 0.7007 Epoch 222/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1879e-05 - acc: 1.0000 - val_loss: 1.8386 - val_acc: 0.7007 Epoch 223/300 100/100 [==============================] - 0s 995us/step - loss: 1.1814e-05 - acc: 1.0000 - val_loss: 1.8383 - val_acc: 0.7007 Epoch 224/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1757e-05 - acc: 1.0000 - val_loss: 1.8380 - val_acc: 0.7007 Epoch 225/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1695e-05 - acc: 1.0000 - val_loss: 1.8377 - val_acc: 0.7007 Epoch 226/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1637e-05 - acc: 1.0000 - val_loss: 1.8374 - val_acc: 0.7008 Epoch 227/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1574e-05 - acc: 1.0000 - val_loss: 1.8371 - val_acc: 0.7009 Epoch 228/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1519e-05 - acc: 1.0000 - val_loss: 1.8368 - val_acc: 0.7009 Epoch 229/300 100/100 [==============================] - 0s 998us/step - loss: 1.1460e-05 - acc: 1.0000 - val_loss: 1.8365 - val_acc: 0.7011 Epoch 230/300 100/100 [==============================] - 0s 
994us/step - loss: 1.1394e-05 - acc: 1.0000 - val_loss: 1.8362 - val_acc: 0.7011 Epoch 231/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1343e-05 - acc: 1.0000 - val_loss: 1.8359 - val_acc: 0.7010 Epoch 232/300 100/100 [==============================] - 0s 994us/step - loss: 1.1283e-05 - acc: 1.0000 - val_loss: 1.8356 - val_acc: 0.7009 Epoch 233/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1225e-05 - acc: 1.0000 - val_loss: 1.8354 - val_acc: 0.7009 Epoch 234/300 100/100 [==============================] - 0s 1000us/step - loss: 1.1170e-05 - acc: 1.0000 - val_loss: 1.8351 - val_acc: 0.7009 Epoch 235/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1115e-05 - acc: 1.0000 - val_loss: 1.8348 - val_acc: 0.7008 Epoch 236/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1062e-05 - acc: 1.0000 - val_loss: 1.8346 - val_acc: 0.7009 Epoch 237/300 100/100 [==============================] - 0s 1ms/step - loss: 1.1004e-05 - acc: 1.0000 - val_loss: 1.8343 - val_acc: 0.7010 Epoch 238/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0947e-05 - acc: 1.0000 - val_loss: 1.8341 - val_acc: 0.7010 Epoch 239/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0898e-05 - acc: 1.0000 - val_loss: 1.8338 - val_acc: 0.7009 Epoch 240/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0843e-05 - acc: 1.0000 - val_loss: 1.8336 - val_acc: 0.7009 Epoch 241/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0793e-05 - acc: 1.0000 - val_loss: 1.8333 - val_acc: 0.7009 Epoch 242/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0740e-05 - acc: 1.0000 - val_loss: 1.8331 - val_acc: 0.7009 Epoch 243/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0688e-05 - acc: 1.0000 - val_loss: 1.8329 - val_acc: 0.7010 Epoch 244/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0637e-05 - acc: 1.0000 - 
val_loss: 1.8326 - val_acc: 0.7010 Epoch 245/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0587e-05 - acc: 1.0000 - val_loss: 1.8324 - val_acc: 0.7008 Epoch 246/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0532e-05 - acc: 1.0000 - val_loss: 1.8322 - val_acc: 0.7009 Epoch 247/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0476e-05 - acc: 1.0000 - val_loss: 1.8320 - val_acc: 0.7010 Epoch 248/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0427e-05 - acc: 1.0000 - val_loss: 1.8317 - val_acc: 0.7010 Epoch 249/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0380e-05 - acc: 1.0000 - val_loss: 1.8315 - val_acc: 0.7011 Epoch 250/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0328e-05 - acc: 1.0000 - val_loss: 1.8313 - val_acc: 0.7010 Epoch 251/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0277e-05 - acc: 1.0000 - val_loss: 1.8311 - val_acc: 0.7009 Epoch 252/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0236e-05 - acc: 1.0000 - val_loss: 1.8309 - val_acc: 0.7010 Epoch 253/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0187e-05 - acc: 1.0000 - val_loss: 1.8307 - val_acc: 0.7011 Epoch 254/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0135e-05 - acc: 1.0000 - val_loss: 1.8305 - val_acc: 0.7010 Epoch 255/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0094e-05 - acc: 1.0000 - val_loss: 1.8303 - val_acc: 0.7009 Epoch 256/300 100/100 [==============================] - 0s 1ms/step - loss: 1.0043e-05 - acc: 1.0000 - val_loss: 1.8301 - val_acc: 0.7008 Epoch 257/300 100/100 [==============================] - 0s 1ms/step - loss: 9.9958e-06 - acc: 1.0000 - val_loss: 1.8299 - val_acc: 0.7007 Epoch 258/300 100/100 [==============================] - 0s 1ms/step - loss: 9.9475e-06 - acc: 1.0000 - val_loss: 1.8297 - val_acc: 0.7008 Epoch 259/300 
100/100 [==============================] - 0s 1ms/step - loss: 9.9004e-06 - acc: 1.0000 - val_loss: 1.8296 - val_acc: 0.7009 Epoch 260/300 100/100 [==============================] - 0s 1ms/step - loss: 9.8599e-06 - acc: 1.0000 - val_loss: 1.8294 - val_acc: 0.7011 Epoch 261/300 100/100 [==============================] - 0s 1ms/step - loss: 9.8104e-06 - acc: 1.0000 - val_loss: 1.8292 - val_acc: 0.7011 Epoch 262/300 100/100 [==============================] - 0s 1ms/step - loss: 9.7675e-06 - acc: 1.0000 - val_loss: 1.8290 - val_acc: 0.7012 Epoch 263/300 100/100 [==============================] - 0s 1ms/step - loss: 9.7192e-06 - acc: 1.0000 - val_loss: 1.8289 - val_acc: 0.7013 Epoch 264/300 100/100 [==============================] - 0s 1ms/step - loss: 9.6835e-06 - acc: 1.0000 - val_loss: 1.8287 - val_acc: 0.7014 Epoch 265/300 100/100 [==============================] - 0s 1ms/step - loss: 9.6364e-06 - acc: 1.0000 - val_loss: 1.8285 - val_acc: 0.7014 Epoch 266/300 100/100 [==============================] - 0s 1ms/step - loss: 9.5947e-06 - acc: 1.0000 - val_loss: 1.8283 - val_acc: 0.7015 Epoch 267/300 100/100 [==============================] - 0s 1ms/step - loss: 9.5505e-06 - acc: 1.0000 - val_loss: 1.8282 - val_acc: 0.7015 Epoch 268/300 100/100 [==============================] - 0s 1ms/step - loss: 9.5035e-06 - acc: 1.0000 - val_loss: 1.8280 - val_acc: 0.7015 Epoch 269/300 100/100 [==============================] - 0s 1ms/step - loss: 9.4599e-06 - acc: 1.0000 - val_loss: 1.8279 - val_acc: 0.7015 Epoch 270/300 100/100 [==============================] - 0s 1ms/step - loss: 9.4206e-06 - acc: 1.0000 - val_loss: 1.8277 - val_acc: 0.7015 Epoch 271/300 100/100 [==============================] - 0s 1ms/step - loss: 9.3807e-06 - acc: 1.0000 - val_loss: 1.8276 - val_acc: 0.7016 Epoch 272/300 100/100 [==============================] - 0s 1ms/step - loss: 9.3366e-06 - acc: 1.0000 - val_loss: 1.8274 - val_acc: 0.7016 Epoch 273/300 100/100 [==============================] - 0s 
1ms/step - loss: 9.2919e-06 - acc: 1.0000 - val_loss: 1.8273 - val_acc: 0.7016 Epoch 274/300 100/100 [==============================] - 0s 1ms/step - loss: 9.2537e-06 - acc: 1.0000 - val_loss: 1.8271 - val_acc: 0.7017 Epoch 275/300 100/100 [==============================] - 0s 1ms/step - loss: 9.2203e-06 - acc: 1.0000 - val_loss: 1.8270 - val_acc: 0.7018 Epoch 276/300 100/100 [==============================] - 0s 1ms/step - loss: 9.1786e-06 - acc: 1.0000 - val_loss: 1.8268 - val_acc: 0.7017 Epoch 277/300 100/100 [==============================] - 0s 1ms/step - loss: 9.1309e-06 - acc: 1.0000 - val_loss: 1.8267 - val_acc: 0.7018 Epoch 278/300 100/100 [==============================] - 0s 1ms/step - loss: 9.0886e-06 - acc: 1.0000 - val_loss: 1.8265 - val_acc: 0.7018 Epoch 279/300 100/100 [==============================] - 0s 1ms/step - loss: 9.0528e-06 - acc: 1.0000 - val_loss: 1.8264 - val_acc: 0.7018 Epoch 280/300 100/100 [==============================] - 0s 1ms/step - loss: 9.0171e-06 - acc: 1.0000 - val_loss: 1.8263 - val_acc: 0.7017 Epoch 281/300 100/100 [==============================] - 0s 1ms/step - loss: 8.9771e-06 - acc: 1.0000 - val_loss: 1.8262 - val_acc: 0.7018 Epoch 282/300 100/100 [==============================] - 0s 1ms/step - loss: 8.9384e-06 - acc: 1.0000 - val_loss: 1.8260 - val_acc: 0.7018 Epoch 283/300 100/100 [==============================] - 0s 1ms/step - loss: 8.9026e-06 - acc: 1.0000 - val_loss: 1.8259 - val_acc: 0.7018 Epoch 284/300 100/100 [==============================] - 0s 1ms/step - loss: 8.8651e-06 - acc: 1.0000 - val_loss: 1.8258 - val_acc: 0.7018 Epoch 285/300 100/100 [==============================] - 0s 1ms/step - loss: 8.8257e-06 - acc: 1.0000 - val_loss: 1.8256 - val_acc: 0.7017 Epoch 286/300 100/100 [==============================] - 0s 1ms/step - loss: 8.7900e-06 - acc: 1.0000 - val_loss: 1.8255 - val_acc: 0.7016 Epoch 287/300 100/100 [==============================] - 0s 1ms/step - loss: 8.7483e-06 - acc: 1.0000 - val_loss: 
1.8254 - val_acc: 0.7018 Epoch 288/300 100/100 [==============================] - 0s 1ms/step - loss: 8.7113e-06 - acc: 1.0000 - val_loss: 1.8253 - val_acc: 0.7019 Epoch 289/300 100/100 [==============================] - 0s 1ms/step - loss: 8.6773e-06 - acc: 1.0000 - val_loss: 1.8252 - val_acc: 0.7018 Epoch 290/300 100/100 [==============================] - 0s 1ms/step - loss: 8.6404e-06 - acc: 1.0000 - val_loss: 1.8251 - val_acc: 0.7018 Epoch 291/300 100/100 [==============================] - 0s 1ms/step - loss: 8.6040e-06 - acc: 1.0000 - val_loss: 1.8249 - val_acc: 0.7018 Epoch 292/300 100/100 [==============================] - 0s 1ms/step - loss: 8.5724e-06 - acc: 1.0000 - val_loss: 1.8248 - val_acc: 0.7017 Epoch 293/300 100/100 [==============================] - 0s 1ms/step - loss: 8.5361e-06 - acc: 1.0000 - val_loss: 1.8247 - val_acc: 0.7018 Epoch 294/300 100/100 [==============================] - 0s 1ms/step - loss: 8.4991e-06 - acc: 1.0000 - val_loss: 1.8246 - val_acc: 0.7018 Epoch 295/300 100/100 [==============================] - 0s 1ms/step - loss: 8.4592e-06 - acc: 1.0000 - val_loss: 1.8245 - val_acc: 0.7018 Epoch 296/300 100/100 [==============================] - 0s 1ms/step - loss: 8.4258e-06 - acc: 1.0000 - val_loss: 1.8244 - val_acc: 0.7018 Epoch 297/300 100/100 [==============================] - 0s 1ms/step - loss: 8.3894e-06 - acc: 1.0000 - val_loss: 1.8243 - val_acc: 0.7017 Epoch 298/300 100/100 [==============================] - 0s 1ms/step - loss: 8.3501e-06 - acc: 1.0000 - val_loss: 1.8242 - val_acc: 0.7017 Epoch 299/300 100/100 [==============================] - 0s 1ms/step - loss: 8.3197e-06 - acc: 1.0000 - val_loss: 1.8241 - val_acc: 0.7017 Epoch 300/300 100/100 [==============================] - 0s 1ms/step - loss: 8.2887e-06 - acc: 1.0000 - val_loss: 1.8240 - val_acc: 0.7017
<keras.callbacks.History at 0x7f18040c4668>
# Render the current default TF graph (project-local helper; presumably a
# TensorBoard-style inline visualization — confirm against its definition).
show_computational_graph(graph_def=tf.get_default_graph())
# Tear down the Keras backend session/graph from the earlier Keras run so the
# TensorFlow model built below starts from a clean default graph.
K.clear_session()
# --- Ladder-network hyperparameters for MNIST ---
# Encoder layer widths: 784-d input -> hidden layers -> 10-class output.
layer_sizes = [784, 1000, 500, 250, 250, 250, 10]
L = len(layer_sizes) - 1 # number of layers
num_examples = 60000  # size of the MNIST training split
num_epochs = 150
starter_learning_rate = 0.02
decay_after = 15 # epoch after which to begin learning rate decay
batch_size = 100
num_iter = (num_examples//batch_size) * num_epochs # number of loop iterations
print( "=== Starting Session ===" )
sess = tf.Session()
=== Starting Session ===
# Resume from the latest checkpoint if one exists; otherwise initialize
# all variables from scratch. Sets:
#   i_iter  - the training-loop iteration to resume from (0 for a fresh run)
#   epoch_n - the restored epoch number (only when a checkpoint was found)
i_iter = 0
ckpt = tf.train.get_checkpoint_state('checkpoints/')  # latest checkpoint (if any)
if ckpt and ckpt.model_checkpoint_path:
    # Checkpoint exists: restore the parameters and recover epoch_n / i_iter.
    saver.restore(sess, ckpt.model_checkpoint_path)
    # Checkpoint paths look like 'checkpoints/model.ckpt-<epoch>'; parse the
    # epoch number back out of the filename.
    epoch_n = int(ckpt.model_checkpoint_path.split('-')[1])
    # EDITED
    # https://github.com/rinuboney/ladder/issues/12#issuecomment-332497271
    # Integer division keeps i_iter an int, a valid range() bound on Python 3.
    i_iter = (epoch_n + 1) * (num_examples // batch_size)
    print("Restored Epoch ", epoch_n)
else:
    # No checkpoint: make sure the checkpoints directory exists, then
    # initialize every variable. exist_ok avoids a race with other runs.
    os.makedirs('checkpoints', exist_ok=True)
    init = tf.global_variables_initializer()
    sess.run(init)

print("=== Training ===")
# BUG FIX: the original passed the "{}"-placeholder string and the accuracy
# value as two separate print() arguments, so the literal "{}%" was printed
# instead of being substituted. Use str.format() to interpolate the value.
print("Initial Accuracy: {}%".format(
    sess.run(accuracy,
             feed_dict={
                 inputs: mnist.semi_test.images,
                 outputs: mnist.semi_test.labels,
                 training: False})))
--------------------------------------------------------------------------- NameError Traceback (most recent call last) <ipython-input-41-e56aae9ef8d0> in <module>() 16 os.makedirs('checkpoints') 17 init = tf.global_variables_initializer() ---> 18 sess.run(init) 19 20 print( "=== Training ===" ) NameError: name 'sess' is not defined
# --- Main training loop ---
# Resumes at i_iter when a checkpoint was restored above; tqdm shows progress.
# (Indentation restored: the notebook export had flattened the loop body,
# which made this cell syntactically invalid as plain Python.)
for i in tqdm.tqdm(range(i_iter, num_iter)):
    images, labels = mnist.semi_train.next_batch(batch_size)
    sess.run(train_step, feed_dict={inputs: images, outputs: labels, training: True})
    # End of an epoch: (i+1) is a multiple of the iterations-per-epoch count.
    if (i > 1) and ((i + 1) % (num_iter // num_epochs) == 0):
        epoch_n = i // (num_examples // batch_size)
        if (epoch_n + 1) >= decay_after:
            # Linearly decay the learning rate toward 0 over the remaining
            # epochs. epoch_n + 1 because the rate is set for the NEXT epoch.
            # learning_rate = starter_learning_rate * ((num_epochs - epoch_n) / (num_epochs - decay_after))
            ratio = 1.0 * (num_epochs - (epoch_n + 1))
            ratio = max(0, ratio / (num_epochs - decay_after))
            sess.run(learning_rate.assign(starter_learning_rate * ratio))
        # Per-epoch checkpointing / logging, currently disabled:
        #saver.save(sess, 'checkpoints/model.ckpt', epoch_n)
        # print( "Epoch ", epoch_n, ", Accuracy: ", sess.run(accuracy, feed_dict={inputs: mnist.test.images, outputs:mnist.test.labels, training: False}), "%" )
        #with open('train_log', 'a') as train_log:
        #    # write test accuracy to file "train_log"
        #    train_log_w = csv.writer(train_log)
        #    log_i = [epoch_n] + sess.run([accuracy], feed_dict={inputs: mnist.test.images, outputs: mnist.test.labels, training: False})
        #    train_log_w.writerow(log_i)

# Evaluate once on the held-out (semi-supervised) test set, then release
# the session's resources.
print("Final Accuracy: ",
      sess.run(accuracy,
               feed_dict={
                   inputs: mnist.semi_test.images,
                   outputs: mnist.semi_test.labels,
                   training: False}),
      "%")
sess.close()
13%|█▎ | 11417/90000 [02:55<26:29, 49.45it/s]