얕은 신경망을 만들어 MNIST 숫자를 분류합니다.
from tensorflow import keras
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import SGD
from matplotlib import pyplot as plt
(X_train, y_train), (X_valid, y_valid) = mnist.load_data()
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz 11490434/11490434 [==============================] - 0s 0us/step
X_train.shape
(60000, 28, 28)
y_train.shape
(60000,)
y_train[0:12]
array([5, 0, 4, 1, 9, 2, 1, 3, 1, 4, 3, 5], dtype=uint8)
plt.figure(figsize=(5,5))
for k in range(12):
plt.subplot(3, 4, k+1)
plt.imshow(X_train[k], cmap='Greys')
plt.axis('off')
plt.tight_layout()
plt.show()
X_valid.shape
(10000, 28, 28)
y_valid.shape
(10000,)
plt.imshow(X_valid[0], cmap='Greys')
plt.show()
X_valid[0]
array([[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 84, 185, 159, 151, 60, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 222, 254, 254, 254, 254, 241, 198, 198, 198, 198, 198, 198, 198, 198, 170, 52, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 67, 114, 72, 114, 163, 227, 254, 225, 254, 254, 254, 250, 229, 254, 254, 140, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 66, 14, 67, 67, 67, 59, 21, 236, 254, 106, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 253, 209, 18, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 233, 255, 83, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 254, 238, 44, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 249, 254, 62, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 254, 187, 5, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 205, 248, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 126, 254, 182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 75, 251, 240, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 221, 254, 166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 203, 254, 219, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 254, 254, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 224, 254, 115, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 254, 254, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61, 242, 254, 254, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 121, 254, 254, 219, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 121, 254, 207, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=uint8)
y_valid[0]
7
X_train = X_train.reshape(60000, 784).astype('float32')
X_valid = X_valid.reshape(10000, 784).astype('float32')
X_train /= 255
X_valid /= 255
X_valid[0]
array([0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.32941177, 0.7254902 , 0.62352943, 0.5921569 , 0.23529412, 0.14117648, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.87058824, 0.99607843, 0.99607843, 0.99607843, 0.99607843, 0.94509804, 0.7764706 , 0.7764706 , 0.7764706 , 0.7764706 , 0.7764706 , 0.7764706 , 0.7764706 , 0.7764706 , 0.6666667 , 0.20392157, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.2627451 , 0.44705883, 0.28235295, 0.44705883, 0.6392157 , 0.8901961 , 0.99607843, 0.88235295, 0.99607843, 0.99607843, 0.99607843, 0.98039216, 0.8980392 , 0.99607843, 0.99607843, 0.54901963, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.06666667, 0.25882354, 0.05490196, 0.2627451 , 0.2627451 , 0.2627451 , 0.23137255, 0.08235294, 0.9254902 , 0.99607843, 0.41568628, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.3254902 , 0.99215686, 0.81960785, 0.07058824, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.08627451, 0.9137255 , 1. , 0.3254902 , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.5058824 , 0.99607843, 0.93333334, 0.17254902, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.23137255, 0.9764706 , 0.99607843, 0.24313726, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.52156866, 0.99607843, 0.73333335, 0.01960784, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.03529412, 0.8039216 , 0.972549 , 0.22745098, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.49411765, 0.99607843, 0.7137255 , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.29411766, 0.9843137 , 0.9411765 , 0.22352941, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.07450981, 0.8666667 , 0.99607843, 0.6509804 , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.01176471, 0.79607844, 0.99607843, 0.85882354, 0.13725491, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.14901961, 0.99607843, 0.99607843, 0.3019608 , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.12156863, 0.8784314 , 0.99607843, 0.4509804 , 0.00392157, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.52156866, 0.99607843, 0.99607843, 0.20392157, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.23921569, 0.9490196 , 0.99607843, 0.99607843, 0.20392157, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.4745098 , 0.99607843, 0.99607843, 0.85882354, 0.15686275, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.4745098 , 0.99607843, 0.8117647 , 0.07058824, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ], dtype=float32)
n_classes = 10
y_train = keras.utils.to_categorical(y_train, n_classes)
y_valid = keras.utils.to_categorical(y_valid, n_classes)
y_valid[0]
array([0., 0., 0., 0., 0., 0., 0., 1., 0., 0.], dtype=float32)
model = Sequential()
model.add(Dense(64, activation='sigmoid', input_shape=(784,)))
model.add(Dense(10, activation='softmax'))
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense (Dense) (None, 64) 50240 dense_1 (Dense) (None, 10) 650 ================================================================= Total params: 50,890 Trainable params: 50,890 Non-trainable params: 0 _________________________________________________________________
(64*784)
50176
(64*784)+64
50240
(10*64)+10
650
model.compile(loss='mean_squared_error', optimizer=SGD(learning_rate=0.01), metrics=['accuracy'])
model.fit(X_train, y_train, batch_size=128, epochs=200, verbose=1, validation_data=(X_valid, y_valid))
Epoch 1/200 469/469 [==============================] - 6s 6ms/step - loss: 0.0962 - accuracy: 0.0877 - val_loss: 0.0938 - val_accuracy: 0.0913 Epoch 2/200 469/469 [==============================] - 3s 6ms/step - loss: 0.0928 - accuracy: 0.0930 - val_loss: 0.0918 - val_accuracy: 0.1056 Epoch 3/200 469/469 [==============================] - 3s 6ms/step - loss: 0.0914 - accuracy: 0.1230 - val_loss: 0.0908 - val_accuracy: 0.1506 Epoch 4/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0906 - accuracy: 0.1632 - val_loss: 0.0902 - val_accuracy: 0.1775 Epoch 5/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0900 - accuracy: 0.1816 - val_loss: 0.0897 - val_accuracy: 0.1918 Epoch 6/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0895 - accuracy: 0.1964 - val_loss: 0.0892 - val_accuracy: 0.2127 Epoch 7/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0891 - accuracy: 0.2178 - val_loss: 0.0888 - val_accuracy: 0.2354 Epoch 8/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0887 - accuracy: 0.2364 - val_loss: 0.0884 - val_accuracy: 0.2542 Epoch 9/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0883 - accuracy: 0.2571 - val_loss: 0.0880 - val_accuracy: 0.2766 Epoch 10/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0879 - accuracy: 0.2883 - val_loss: 0.0877 - val_accuracy: 0.3176 Epoch 11/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0875 - accuracy: 0.3277 - val_loss: 0.0873 - val_accuracy: 0.3560 Epoch 12/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0872 - accuracy: 0.3574 - val_loss: 0.0869 - val_accuracy: 0.3729 Epoch 13/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0868 - accuracy: 0.3719 - val_loss: 0.0866 - val_accuracy: 0.3858 Epoch 14/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0864 - accuracy: 0.3805 - val_loss: 0.0862 - val_accuracy: 0.3930 Epoch 15/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0861 - accuracy: 0.3876 - val_loss: 0.0858 - val_accuracy: 0.3957 Epoch 16/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0857 - accuracy: 0.3898 - val_loss: 0.0854 - val_accuracy: 0.4029 Epoch 17/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0853 - accuracy: 0.3950 - val_loss: 0.0850 - val_accuracy: 0.4083 Epoch 18/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0849 - accuracy: 0.3989 - val_loss: 0.0846 - val_accuracy: 0.4146 Epoch 19/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0845 - accuracy: 0.4043 - val_loss: 0.0842 - val_accuracy: 0.4189 Epoch 20/200 469/469 [==============================] - 2s 5ms/step - loss: 0.0841 - accuracy: 0.4101 - val_loss: 0.0838 - val_accuracy: 0.4238 Epoch 21/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0837 - accuracy: 0.4142 - val_loss: 0.0834 - val_accuracy: 0.4297 Epoch 22/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0833 - accuracy: 0.4186 - val_loss: 0.0830 - val_accuracy: 0.4331 Epoch 23/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0829 - accuracy: 0.4231 - val_loss: 0.0825 - val_accuracy: 0.4376 Epoch 24/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0824 - accuracy: 0.4294 - val_loss: 0.0821 - val_accuracy: 0.4427 Epoch 25/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0820 - accuracy: 0.4347 - val_loss: 0.0817 - val_accuracy: 0.4466 Epoch 26/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0816 - accuracy: 0.4385 - val_loss: 0.0812 - val_accuracy: 0.4503 Epoch 27/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0811 - accuracy: 0.4434 - val_loss: 0.0807 - val_accuracy: 0.4562 Epoch 28/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0807 - accuracy: 0.4484 - val_loss: 0.0803 - val_accuracy: 0.4611 Epoch 29/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0802 - accuracy: 0.4527 - val_loss: 0.0798 - val_accuracy: 0.4659 Epoch 30/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0797 - accuracy: 0.4588 - val_loss: 0.0793 - val_accuracy: 0.4700 Epoch 31/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0792 - accuracy: 0.4633 - val_loss: 0.0788 - val_accuracy: 0.4755 Epoch 32/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0788 - accuracy: 0.4685 - val_loss: 0.0783 - val_accuracy: 0.4801 Epoch 33/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0783 - accuracy: 0.4718 - val_loss: 0.0778 - val_accuracy: 0.4844 Epoch 34/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0778 - accuracy: 0.4772 - val_loss: 0.0773 - val_accuracy: 0.4892 Epoch 35/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0773 - accuracy: 0.4821 - val_loss: 0.0768 - val_accuracy: 0.4926 Epoch 36/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0768 - accuracy: 0.4862 - val_loss: 0.0763 - val_accuracy: 0.4962 Epoch 37/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0763 - accuracy: 0.4902 - val_loss: 0.0758 - val_accuracy: 0.4991 Epoch 38/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0757 - accuracy: 0.4945 - val_loss: 0.0753 - val_accuracy: 0.5025 Epoch 39/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0752 - accuracy: 0.4976 - val_loss: 0.0747 - val_accuracy: 0.5051 Epoch 40/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0747 - accuracy: 0.5021 - val_loss: 0.0742 - val_accuracy: 0.5091 Epoch 41/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0742 - accuracy: 0.5049 - val_loss: 0.0737 - val_accuracy: 0.5134 Epoch 42/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0737 - accuracy: 0.5083 - val_loss: 0.0731 - val_accuracy: 0.5171 Epoch 43/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0731 - accuracy: 0.5116 - val_loss: 0.0726 - val_accuracy: 0.5204 Epoch 44/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0726 - accuracy: 0.5152 - val_loss: 0.0721 - val_accuracy: 0.5237 Epoch 45/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0721 - accuracy: 0.5187 - val_loss: 0.0715 - val_accuracy: 0.5271 Epoch 46/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0716 - accuracy: 0.5220 - val_loss: 0.0710 - val_accuracy: 0.5294 Epoch 47/200 469/469 [==============================] - 1s 3ms/step - loss: 0.0710 - accuracy: 0.5247 - val_loss: 0.0704 - val_accuracy: 0.5323 Epoch 48/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0705 - accuracy: 0.5282 - val_loss: 0.0699 - val_accuracy: 0.5358 Epoch 49/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0700 - accuracy: 0.5307 - val_loss: 0.0694 - val_accuracy: 0.5399 Epoch 50/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0694 - accuracy: 0.5336 - val_loss: 0.0688 - val_accuracy: 0.5430 Epoch 51/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0689 - accuracy: 0.5368 - val_loss: 0.0683 - val_accuracy: 0.5456 Epoch 52/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0684 - accuracy: 0.5402 - val_loss: 0.0678 - val_accuracy: 0.5480 Epoch 53/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0679 - accuracy: 0.5432 - val_loss: 0.0673 - val_accuracy: 0.5505 Epoch 54/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0674 - accuracy: 0.5459 - val_loss: 0.0667 - val_accuracy: 0.5534 Epoch 55/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0668 - accuracy: 0.5487 - val_loss: 0.0662 - val_accuracy: 0.5585 Epoch 56/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0663 - accuracy: 0.5520 - val_loss: 0.0657 - val_accuracy: 0.5621 Epoch 57/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0658 - accuracy: 0.5558 - val_loss: 0.0652 - val_accuracy: 0.5657 Epoch 58/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0653 - accuracy: 0.5590 - val_loss: 0.0647 - val_accuracy: 0.5694 Epoch 59/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0648 - accuracy: 0.5627 - val_loss: 0.0642 - val_accuracy: 0.5728 Epoch 60/200 469/469 [==============================] - 2s 5ms/step - loss: 0.0643 - accuracy: 0.5664 - val_loss: 0.0637 - val_accuracy: 0.5772 Epoch 61/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0638 - accuracy: 0.5699 - val_loss: 0.0632 - val_accuracy: 0.5815 Epoch 62/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0634 - accuracy: 0.5745 - val_loss: 0.0627 - val_accuracy: 0.5867 Epoch 63/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0629 - accuracy: 0.5781 - val_loss: 0.0622 - val_accuracy: 0.5911 Epoch 64/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0624 - accuracy: 0.5825 - val_loss: 0.0617 - val_accuracy: 0.5961 Epoch 65/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0619 - accuracy: 0.5877 - val_loss: 0.0613 - val_accuracy: 0.6007 Epoch 66/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0615 - accuracy: 0.5926 - val_loss: 0.0608 - val_accuracy: 0.6063 Epoch 67/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0610 - accuracy: 0.5980 - val_loss: 0.0603 - val_accuracy: 0.6115 Epoch 68/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0606 - accuracy: 0.6036 - val_loss: 0.0599 - val_accuracy: 0.6165 Epoch 69/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0601 - accuracy: 0.6089 - val_loss: 0.0594 - val_accuracy: 0.6213 Epoch 70/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0597 - accuracy: 0.6137 - val_loss: 0.0590 - val_accuracy: 0.6266 Epoch 71/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0592 - accuracy: 0.6187 - val_loss: 0.0585 - val_accuracy: 0.6313 Epoch 72/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0588 - accuracy: 0.6241 - val_loss: 0.0581 - val_accuracy: 0.6359 Epoch 73/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0584 - accuracy: 0.6285 - val_loss: 0.0576 - val_accuracy: 0.6420 Epoch 74/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0579 - accuracy: 0.6338 - val_loss: 0.0572 - val_accuracy: 0.6473 Epoch 75/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0575 - accuracy: 0.6381 - val_loss: 0.0568 - val_accuracy: 0.6520 Epoch 76/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0571 - accuracy: 0.6436 - val_loss: 0.0564 - val_accuracy: 0.6578 Epoch 77/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0567 - accuracy: 0.6485 - val_loss: 0.0559 - val_accuracy: 0.6638 Epoch 78/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0563 - accuracy: 0.6529 - val_loss: 0.0555 - val_accuracy: 0.6678 Epoch 79/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0559 - accuracy: 0.6576 - val_loss: 0.0551 - val_accuracy: 0.6712 Epoch 80/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0555 - accuracy: 0.6629 - val_loss: 0.0547 - val_accuracy: 0.6749 Epoch 81/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0551 - accuracy: 0.6671 - val_loss: 0.0543 - val_accuracy: 0.6780 Epoch 82/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0547 - accuracy: 0.6722 - val_loss: 0.0539 - val_accuracy: 0.6821 Epoch 83/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0543 - accuracy: 0.6765 - val_loss: 0.0535 - val_accuracy: 0.6860 Epoch 84/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0539 - accuracy: 0.6804 - val_loss: 0.0531 - val_accuracy: 0.6899 Epoch 85/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0535 - accuracy: 0.6839 - val_loss: 0.0528 - val_accuracy: 0.6938 Epoch 86/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0532 - accuracy: 0.6884 - val_loss: 0.0524 - val_accuracy: 0.6977 Epoch 87/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0528 - accuracy: 0.6923 - val_loss: 0.0520 - val_accuracy: 0.7023 Epoch 88/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0524 - accuracy: 0.6966 - val_loss: 0.0516 - val_accuracy: 0.7063 Epoch 89/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0521 - accuracy: 0.6999 - val_loss: 0.0513 - val_accuracy: 0.7102 Epoch 90/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0517 - accuracy: 0.7035 - val_loss: 0.0509 - val_accuracy: 0.7128 Epoch 91/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0513 - accuracy: 0.7064 - val_loss: 0.0505 - val_accuracy: 0.7169 Epoch 92/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0510 - accuracy: 0.7099 - val_loss: 0.0502 - val_accuracy: 0.7193 Epoch 93/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0506 - accuracy: 0.7126 - val_loss: 0.0498 - val_accuracy: 0.7230 Epoch 94/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0503 - accuracy: 0.7155 - val_loss: 0.0495 - val_accuracy: 0.7257 Epoch 95/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0499 - accuracy: 0.7182 - val_loss: 0.0491 - val_accuracy: 0.7288 Epoch 96/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0496 - accuracy: 0.7204 - val_loss: 0.0488 - val_accuracy: 0.7308 Epoch 97/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0493 - accuracy: 0.7233 - val_loss: 0.0484 - val_accuracy: 0.7331 Epoch 98/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0489 - accuracy: 0.7254 - val_loss: 0.0481 - val_accuracy: 0.7362 Epoch 99/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0486 - accuracy: 0.7283 - val_loss: 0.0478 - val_accuracy: 0.7385 Epoch 100/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0483 - accuracy: 0.7308 - val_loss: 0.0474 - val_accuracy: 0.7415 Epoch 101/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0480 - accuracy: 0.7329 - val_loss: 0.0471 - val_accuracy: 0.7436 Epoch 102/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0476 - accuracy: 0.7352 - val_loss: 0.0468 - val_accuracy: 0.7451 Epoch 103/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0473 - accuracy: 0.7378 - val_loss: 0.0465 - val_accuracy: 0.7472 Epoch 104/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0470 - accuracy: 0.7405 - val_loss: 0.0461 - val_accuracy: 0.7504 Epoch 105/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0467 - accuracy: 0.7430 - val_loss: 0.0458 - val_accuracy: 0.7523 Epoch 106/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0464 - accuracy: 0.7446 - val_loss: 0.0455 - val_accuracy: 0.7552 Epoch 107/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0461 - accuracy: 0.7468 - val_loss: 0.0452 - val_accuracy: 0.7561 Epoch 108/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0458 - accuracy: 0.7491 - val_loss: 0.0449 - val_accuracy: 0.7591 Epoch 109/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0455 - accuracy: 0.7511 - val_loss: 0.0446 - val_accuracy: 0.7609 Epoch 110/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0452 - accuracy: 0.7533 - val_loss: 0.0443 - val_accuracy: 0.7630 Epoch 111/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0449 - accuracy: 0.7555 - val_loss: 0.0440 - val_accuracy: 0.7648 Epoch 112/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0446 - accuracy: 0.7572 - val_loss: 0.0437 - val_accuracy: 0.7674 Epoch 113/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0443 - accuracy: 0.7589 - val_loss: 0.0434 - val_accuracy: 0.7694 Epoch 114/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0440 - accuracy: 0.7610 - val_loss: 0.0431 - val_accuracy: 0.7713 Epoch 115/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0437 - accuracy: 0.7633 - val_loss: 0.0428 - val_accuracy: 0.7728 Epoch 116/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0435 - accuracy: 0.7653 - val_loss: 0.0426 - val_accuracy: 0.7746 Epoch 117/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0432 - accuracy: 0.7671 - val_loss: 0.0423 - val_accuracy: 0.7763 Epoch 118/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0429 - accuracy: 0.7687 - val_loss: 0.0420 - val_accuracy: 0.7782 Epoch 119/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0426 - accuracy: 0.7711 - val_loss: 0.0417 - val_accuracy: 0.7803 Epoch 120/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0424 - accuracy: 0.7730 - val_loss: 0.0415 - val_accuracy: 0.7834 Epoch 121/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0421 - accuracy: 0.7749 - val_loss: 0.0412 - val_accuracy: 0.7859 Epoch 122/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0419 - accuracy: 0.7770 - val_loss: 0.0409 - val_accuracy: 0.7883 Epoch 123/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0416 - accuracy: 0.7789 - val_loss: 0.0407 - val_accuracy: 0.7899 Epoch 124/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0413 - accuracy: 0.7806 - val_loss: 0.0404 - val_accuracy: 0.7916 Epoch 125/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0411 - accuracy: 0.7829 - val_loss: 0.0402 - val_accuracy: 0.7933 Epoch 126/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0408 - accuracy: 0.7850 - val_loss: 0.0399 - val_accuracy: 0.7953 Epoch 127/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0406 - accuracy: 0.7864 - val_loss: 0.0397 - val_accuracy: 0.7971 Epoch 128/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0403 - accuracy: 0.7886 - val_loss: 0.0394 - val_accuracy: 0.7983 Epoch 129/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0401 - accuracy: 0.7905 - val_loss: 0.0392 - val_accuracy: 0.8001 Epoch 130/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0399 - accuracy: 0.7926 - val_loss: 0.0389 - val_accuracy: 0.8020 Epoch 131/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0396 - accuracy: 0.7938 - val_loss: 0.0387 - val_accuracy: 0.8042 Epoch 132/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0394 - accuracy: 0.7960 - val_loss: 0.0384 - val_accuracy: 0.8060 Epoch 133/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0391 - accuracy: 0.7982 - val_loss: 0.0382 - val_accuracy: 0.8074 Epoch 134/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0389 - accuracy: 0.8001 - val_loss: 0.0380 - val_accuracy: 0.8096 Epoch 135/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0387 - accuracy: 0.8016 - val_loss: 0.0378 - val_accuracy: 0.8110 Epoch 136/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0385 - accuracy: 0.8034 - val_loss: 0.0375 - val_accuracy: 0.8129 Epoch 137/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0382 - accuracy: 0.8053 - val_loss: 0.0373 - val_accuracy: 0.8148 Epoch 138/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0380 - accuracy: 0.8070 - val_loss: 0.0371 - val_accuracy: 0.8165 Epoch 139/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0378 - accuracy: 0.8089 - val_loss: 0.0369 - val_accuracy: 0.8178 Epoch 140/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0376 - accuracy: 0.8102 - val_loss: 0.0366 - val_accuracy: 0.8195 Epoch 141/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0374 - accuracy: 0.8120 - val_loss: 0.0364 - val_accuracy: 0.8216 Epoch 142/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0372 - accuracy: 0.8134 - val_loss: 0.0362 - val_accuracy: 0.8242 Epoch 143/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0370 - accuracy: 0.8150 - val_loss: 0.0360 - val_accuracy: 0.8259 Epoch 144/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0368 - accuracy: 0.8166 - val_loss: 0.0358 - val_accuracy: 0.8269 Epoch 145/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0365 - accuracy: 0.8179 - val_loss: 0.0356 - val_accuracy: 0.8283 Epoch 146/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0363 - accuracy: 0.8190 - val_loss: 0.0354 - val_accuracy: 0.8291 Epoch 147/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0361 - accuracy: 0.8206 - val_loss: 0.0352 - val_accuracy: 0.8302 Epoch 148/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0360 - accuracy: 0.8220 - val_loss: 0.0350 - val_accuracy: 0.8309 Epoch 149/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0358 - accuracy: 0.8230 - val_loss: 0.0348 - val_accuracy: 0.8320 Epoch 150/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0356 - accuracy: 0.8245 - val_loss: 0.0346 - val_accuracy: 0.8337 Epoch 151/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0354 - accuracy: 0.8255 - val_loss: 0.0344 - val_accuracy: 0.8352 Epoch 152/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0352 - accuracy: 0.8265 - val_loss: 0.0342 - val_accuracy: 0.8361 Epoch 153/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0350 - accuracy: 0.8284 - val_loss: 0.0340 - val_accuracy: 0.8374 Epoch 154/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0348 - accuracy: 0.8291 - val_loss: 0.0338 - val_accuracy: 0.8381 Epoch 155/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0346 - accuracy: 0.8306 - val_loss: 0.0337 - val_accuracy: 0.8394 Epoch 156/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0345 - accuracy: 0.8316 - val_loss: 0.0335 - val_accuracy: 0.8409 Epoch 157/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0343 - accuracy: 0.8325 - val_loss: 0.0333 - val_accuracy: 0.8417 Epoch 158/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0341 - accuracy: 0.8336 - val_loss: 0.0331 - val_accuracy: 0.8426 Epoch 159/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0339 - accuracy: 0.8344 - val_loss: 0.0330 - val_accuracy: 0.8434 Epoch 160/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0338 - accuracy: 0.8354 - val_loss: 0.0328 - val_accuracy: 0.8443 Epoch 161/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0336 - accuracy: 0.8363 - val_loss: 0.0326 - val_accuracy: 0.8456 Epoch 162/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0334 - accuracy: 0.8373 - val_loss: 0.0324 - val_accuracy: 0.8463 Epoch 163/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0333 - accuracy: 0.8388 - val_loss: 0.0323 - val_accuracy: 0.8472 Epoch 164/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0331 - accuracy: 0.8394 - val_loss: 0.0321 - val_accuracy: 0.8483 Epoch 165/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0329 - accuracy: 0.8404 - val_loss: 0.0319 - val_accuracy: 0.8500 Epoch 166/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0328 - accuracy: 0.8410 - val_loss: 0.0318 - val_accuracy: 0.8501 Epoch 167/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0326 - accuracy: 0.8419 - val_loss: 0.0316 - val_accuracy: 0.8504 Epoch 168/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0324 - accuracy: 0.8428 - val_loss: 0.0315 - val_accuracy: 0.8508 Epoch 169/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0323 - accuracy: 0.8437 - val_loss: 0.0313 - val_accuracy: 0.8517 Epoch 170/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0321 - accuracy: 0.8442 - val_loss: 0.0312 - val_accuracy: 0.8520 Epoch 171/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0320 - accuracy: 0.8450 - val_loss: 0.0310 - val_accuracy: 0.8529 Epoch 172/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0318 - accuracy: 0.8457 - val_loss: 0.0309 - val_accuracy: 0.8536 Epoch 173/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0317 - accuracy: 0.8465 - val_loss: 0.0307 - val_accuracy: 0.8541 Epoch 174/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0315 - accuracy: 0.8471 - val_loss: 0.0306 - val_accuracy: 0.8547 Epoch 175/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0314 - accuracy: 0.8476 - val_loss: 0.0304 - val_accuracy: 0.8552 Epoch 176/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0313 - accuracy: 0.8484 - val_loss: 0.0303 - val_accuracy: 0.8559 Epoch 177/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0311 - accuracy: 0.8492 - val_loss: 0.0301 - val_accuracy: 0.8566 Epoch 178/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0310 - accuracy: 0.8498 - val_loss: 0.0300 - val_accuracy: 0.8567 Epoch 179/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0308 - accuracy: 0.8505 - val_loss: 0.0299 - val_accuracy: 0.8576 Epoch 180/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0307 - accuracy: 0.8513 - val_loss: 0.0297 - val_accuracy: 0.8582 Epoch 181/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0306 - accuracy: 0.8517 - val_loss: 0.0296 - val_accuracy: 0.8588 Epoch 182/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0304 - accuracy: 0.8522 - val_loss: 0.0295 - val_accuracy: 0.8596 Epoch 183/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0303 - accuracy: 0.8528 - val_loss: 0.0293 - val_accuracy: 0.8596 Epoch 184/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0302 - accuracy: 0.8532 - val_loss: 0.0292 - val_accuracy: 0.8604 Epoch 185/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0300 - accuracy: 0.8537 - val_loss: 0.0291 - val_accuracy: 0.8606 Epoch 186/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0299 - accuracy: 0.8544 - val_loss: 0.0289 - val_accuracy: 0.8617 Epoch 187/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0298 - accuracy: 0.8547 - val_loss: 0.0288 - val_accuracy: 0.8621 Epoch 188/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0297 - accuracy: 0.8549 - val_loss: 0.0287 - val_accuracy: 0.8628 Epoch 189/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0295 - accuracy: 0.8556 - val_loss: 0.0286 - val_accuracy: 0.8632 Epoch 190/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0294 - accuracy: 0.8560 - val_loss: 0.0284 - val_accuracy: 0.8633 Epoch 191/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0293 - accuracy: 0.8565 - val_loss: 0.0283 - val_accuracy: 0.8637 Epoch 192/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0292 - accuracy: 0.8569 - val_loss: 0.0282 - val_accuracy: 0.8640 Epoch 193/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0291 - accuracy: 0.8574 - val_loss: 0.0281 - val_accuracy: 0.8647 Epoch 194/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0290 - accuracy: 0.8578 - val_loss: 0.0280 - val_accuracy: 0.8650 Epoch 195/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0288 - accuracy: 0.8582 - val_loss: 0.0278 - val_accuracy: 0.8654 Epoch 196/200 469/469 [==============================] - 2s 4ms/step - loss: 0.0287 - accuracy: 0.8586 - val_loss: 0.0277 - val_accuracy: 0.8661 Epoch 197/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0286 - accuracy: 0.8590 - val_loss: 0.0276 - val_accuracy: 0.8666 Epoch 198/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0285 - accuracy: 0.8594 - val_loss: 0.0275 - val_accuracy: 0.8670 Epoch 199/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0284 - accuracy: 0.8596 - val_loss: 0.0274 - val_accuracy: 0.8672 Epoch 200/200 469/469 [==============================] - 2s 3ms/step - loss: 0.0283 - accuracy: 0.8600 - val_loss: 0.0273 - val_accuracy: 0.8673
<keras.callbacks.History at 0x7f9226d3f250>
model.evaluate(X_valid, y_valid)
313/313 [==============================] - 1s 2ms/step - loss: 0.0273 - accuracy: 0.8673
[0.027292510494589806, 0.8672999739646912]