10장 – 케라스를 사용한 인공 신경망 소개
이 노트북은 10장에 있는 모든 샘플 코드와 연습문제 해답을 가지고 있습니다.
먼저 몇 개의 모듈을 임포트합니다. 맷플롯립 그래프를 인라인으로 출력하도록 만들고 그림을 저장하는 함수를 준비합니다. 또한 파이썬 버전이 3.5 이상인지 확인합니다(파이썬 2.x에서도 동작하지만 곧 지원이 중단되므로 파이썬 3을 사용하는 것이 좋습니다). 사이킷런 버전이 0.20 이상인지와 텐서플로 버전이 2.0 이상인지 확인합니다.
# Python >=3.5 is required
import sys
assert sys.version_info >= (3, 5)
# Scikit-Learn >=0.20 is required
import sklearn
assert sklearn.__version__ >= "0.20"
# TensorFlow >=2.0 is required
import tensorflow as tf
# NOTE(review): these are lexicographic string comparisons; they work for the
# versions used here but are not a true semantic-version compare.
assert tf.__version__ >= "2.0"
# Common imports
import numpy as np
import os

# Seed NumPy so the notebook's outputs are reproducible across runs
np.random.seed(42)

# To plot pretty figures inline
%matplotlib inline
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.rc('axes', labelsize=14)
mpl.rc('xtick', labelsize=12)
mpl.rc('ytick', labelsize=12)

# Where to save the generated figures
PROJECT_ROOT_DIR = "."
CHAPTER_ID = "ann"
IMAGES_PATH = os.path.join(PROJECT_ROOT_DIR, "images", CHAPTER_ID)
os.makedirs(IMAGES_PATH, exist_ok=True)
def save_fig(fig_id, tight_layout=True, fig_extension="png", resolution=300):
    """Save the current matplotlib figure under IMAGES_PATH.

    Args:
        fig_id: base file name (without extension).
        tight_layout: if True, call plt.tight_layout() before saving.
        fig_extension: image format passed to plt.savefig (e.g. "png").
        resolution: DPI of the saved image.
    """
    path = os.path.join(IMAGES_PATH, fig_id + "." + fig_extension)
    print("그림 저장:", fig_id)  # "Saving figure" (runtime string kept as-is)
    if tight_layout:
        plt.tight_layout()
    plt.savefig(path, format=fig_extension, dpi=resolution)
노트: 사이킷런 향후 버전에서 `max_iter`와 `tol` 매개변수의 기본값이 바뀌기 때문에 경고를 피하기 위해 명시적으로 지정합니다.
import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import Perceptron

iris = load_iris()
X = iris.data[:, (2, 3)]  # petal length, petal width
# FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
# the builtin int gives the same 0/1 integer labels.
y = (iris.target == 0).astype(int)

# max_iter and tol are set explicitly to avoid warnings about their
# defaults changing in future Scikit-Learn versions.
per_clf = Perceptron(max_iter=1000, tol=1e-3, random_state=42)
per_clf.fit(X, y)

y_pred = per_clf.predict([[2, 0.5]])
y_pred
array([1])
# Decision boundary as a line in (petal length, petal width) space:
# coef_[0] . x + intercept_ = 0  =>  x1 = a * x0 + b
a = -per_clf.coef_[0][0] / per_clf.coef_[0][1]
b = -per_clf.intercept_ / per_clf.coef_[0][1]

axes = [0, 5, 0, 2]  # plot limits: [x_min, x_max, y_min, y_max]

# Dense grid over the plot area, used to color the two predicted regions
x0, x1 = np.meshgrid(
    np.linspace(axes[0], axes[1], 500).reshape(-1, 1),
    np.linspace(axes[2], axes[3], 200).reshape(-1, 1),
)
X_new = np.c_[x0.ravel(), x1.ravel()]
y_predict = per_clf.predict(X_new)
zz = y_predict.reshape(x0.shape)

plt.figure(figsize=(10, 4))
plt.plot(X[y==0, 0], X[y==0, 1], "bs", label="Not Iris-Setosa")
plt.plot(X[y==1, 0], X[y==1, 1], "yo", label="Iris-Setosa")
# Draw the decision boundary line on top of the samples
plt.plot([axes[0], axes[1]], [a * axes[0] + b, a * axes[1] + b], "k-", linewidth=3)
from matplotlib.colors import ListedColormap
custom_cmap = ListedColormap(['#9898ff', '#fafab0'])
plt.contourf(x0, x1, zz, cmap=custom_cmap)
plt.xlabel("Petal length", fontsize=14)
plt.ylabel("Petal width", fontsize=14)
plt.legend(loc="lower right", fontsize=14)
plt.axis(axes)
save_fig("perceptron_iris_plot")
plt.show()
그림 저장: perceptron_iris_plot
def sigmoid(z):
    """Logistic sigmoid 1 / (1 + e^(-z)), applied elementwise."""
    return 1.0 / (1.0 + np.exp(-z))
def relu(z):
    """Rectified linear unit: elementwise max(z, 0)."""
    return np.maximum(z, 0)
def derivative(f, z, eps=0.000001):
    """Numerically approximate f'(z) with a symmetric difference quotient."""
    upper = f(z + eps)
    lower = f(z - eps)
    return (upper - lower) / (2 * eps)
# Plot the common activation functions (left) and their derivatives (right)
z = np.linspace(-5, 5, 200)
plt.figure(figsize=(11,4))

plt.subplot(121)
plt.plot(z, np.sign(z), "r-", linewidth=1, label="Step")
plt.plot(z, sigmoid(z), "g--", linewidth=2, label="Sigmoid")
plt.plot(z, np.tanh(z), "b-", linewidth=2, label="Tanh")
plt.plot(z, relu(z), "m-.", linewidth=2, label="ReLU")
plt.grid(True)
plt.legend(loc="center right", fontsize=14)
plt.title("Activation functions", fontsize=14)
plt.axis([-5, 5, -1.2, 1.2])

plt.subplot(122)
plt.plot(z, derivative(np.sign, z), "r-", linewidth=1, label="Step")
# Mark z=0 where the step function's derivative is undefined
plt.plot(0, 0, "ro", markersize=5)
plt.plot(0, 0, "rx", markersize=10)
plt.plot(z, derivative(sigmoid, z), "g--", linewidth=2, label="Sigmoid")
plt.plot(z, derivative(np.tanh, z), "b-", linewidth=2, label="Tanh")
plt.plot(z, derivative(relu, z), "m-.", linewidth=2, label="ReLU")
plt.grid(True)
#plt.legend(loc="center right", fontsize=14)
plt.title("Derivatives", fontsize=14)
plt.axis([-5, 5, -0.2, 1.2])

save_fig("activation_functions_plot")
plt.show()
그림 저장: activation_functions_plot
def heaviside(z):
    """Step function: 1 where z >= 0, else 0, preserving z's dtype."""
    step = (z >= 0)
    return step.astype(z.dtype)

def mlp_xor(x1, x2, activation=heaviside):
    """Hand-wired two-layer perceptron computing XOR of x1 and x2."""
    hidden_and = activation(x1 + x2 - 1.5)  # ~AND: fires only when both inputs are ~1
    hidden_or = activation(x1 + x2 - 0.5)   # ~OR: fires when at least one input is ~1
    return activation(hidden_or - hidden_and - 0.5)
# Evaluate the XOR network on a dense grid to visualize its output surface
x1s = np.linspace(-0.2, 1.2, 100)
x2s = np.linspace(-0.2, 1.2, 100)
x1, x2 = np.meshgrid(x1s, x2s)
z1 = mlp_xor(x1, x2, activation=heaviside)
z2 = mlp_xor(x1, x2, activation=sigmoid)

plt.figure(figsize=(10,4))

plt.subplot(121)
plt.contourf(x1, x2, z1)
plt.plot([0, 1], [0, 1], "gs", markersize=20)
plt.plot([0, 1], [1, 0], "y^", markersize=20)
plt.title("Activation function: heaviside", fontsize=14)
plt.grid(True)

plt.subplot(122)
plt.contourf(x1, x2, z2)
plt.plot([0, 1], [0, 1], "gs", markersize=20)
plt.plot([0, 1], [1, 0], "y^", markersize=20)
plt.title("Activation function: sigmoid", fontsize=14)
plt.grid(True)
먼저 텐서플로와 케라스를 임포트합니다.
import tensorflow as tf
from tensorflow import keras
tf.__version__
'2.6.0'
keras.__version__
'2.6.0'
먼저 MNIST 데이터셋을 로드하겠습니다. 케라스는 `keras.datasets`에 널리 사용하는 데이터셋을 로드하기 위한 함수를 제공합니다. 이 데이터셋은 이미 훈련 세트와 테스트 세트로 나누어져 있습니다. 훈련 세트를 더 나누어 검증 세트를 만드는 것이 좋습니다:
# Fashion MNIST: 70,000 grayscale 28x28 images of clothing, 10 classes,
# pre-split into train (60k) and test (10k) sets
fashion_mnist = keras.datasets.fashion_mnist
(X_train_full, y_train_full), (X_test, y_test) = fashion_mnist.load_data()
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/train-labels-idx1-ubyte.gz 32768/29515 [=================================] - 0s 0us/step 40960/29515 [=========================================] - 0s 0us/step Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/train-images-idx3-ubyte.gz 26427392/26421880 [==============================] - 0s 0us/step 26435584/26421880 [==============================] - 0s 0us/step Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/t10k-labels-idx1-ubyte.gz 16384/5148 [===============================================================================================] - 0s 0us/step Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/t10k-images-idx3-ubyte.gz 4423680/4422102 [==============================] - 0s 0us/step 4431872/4422102 [==============================] - 0s 0us/step
훈련 세트는 60,000개의 흑백 이미지입니다. 각 이미지의 크기는 28x28 픽셀입니다:
X_train_full.shape
(60000, 28, 28)
각 픽셀의 강도는 바이트(0~255)로 표현됩니다:
X_train_full.dtype
dtype('uint8')
전체 훈련 세트를 검증 세트와 (조금 더 작은) 훈련 세트로 나누어 보죠. 또한 픽셀 강도를 255로 나누어 0~1 범위의 실수로 바꾸겠습니다.
# Hold out the first 5,000 training images for validation and scale
# pixel intensities from [0, 255] bytes to [0, 1] floats
X_valid, X_train = X_train_full[:5000] / 255., X_train_full[5000:] / 255.
y_valid, y_train = y_train_full[:5000], y_train_full[5000:]
X_test = X_test / 255.
맷플롯립의 `imshow()` 함수와 `'binary'` 컬러맵을 사용해 이미지를 출력할 수 있습니다:
# Display the first training image using a grayscale colormap
plt.imshow(X_train[0], cmap="binary")
plt.axis('off')
plt.show()
레이블은 0에서 9까지 (uint8로 표현된) 클래스 아이디입니다:
y_train
array([4, 0, 7, ..., 3, 0, 5], dtype=uint8)
클래스 이름은 다음과 같습니다:
# Human-readable names for the 10 Fashion MNIST class ids (0-9)
class_names = ["T-shirt/top", "Trouser", "Pullover", "Dress", "Coat",
               "Sandal", "Shirt", "Sneaker", "Bag", "Ankle boot"]
훈련 세트에 있는 첫 번째 이미지는 코트입니다:
class_names[y_train[0]]
'Coat'
검증 세트는 5,000개의 이미지를 담고 있고 테스트 세트는 10,000개의 이미지를 가집니다:
X_valid.shape
(5000, 28, 28)
X_test.shape
(10000, 28, 28)
이 데이터셋에 있는 샘플 이미지를 몇 개 출력해 보죠:
# Plot a 4x10 grid of the first 40 training images with class-name titles
n_rows = 4
n_cols = 10
plt.figure(figsize=(n_cols * 1.2, n_rows * 1.2))
for row in range(n_rows):
    for col in range(n_cols):
        index = n_cols * row + col
        plt.subplot(n_rows, n_cols, index + 1)
        plt.imshow(X_train[index], cmap="binary", interpolation="nearest")
        plt.axis('off')
        plt.title(class_names[y_train[index]], fontsize=12)
plt.subplots_adjust(wspace=0.2, hspace=0.5)
save_fig('fashion_mnist_plot', tight_layout=False)
plt.show()
그림 저장: fashion_mnist_plot
# Classification MLP built incrementally with add()
model = keras.models.Sequential()
model.add(keras.layers.Flatten(input_shape=[28, 28]))  # 28x28 image -> 784 vector
model.add(keras.layers.Dense(300, activation="relu"))
model.add(keras.layers.Dense(100, activation="relu"))
model.add(keras.layers.Dense(10, activation="softmax"))  # one probability per class
# Rebuild the same model with the list-style Sequential API,
# resetting the session and seeding for reproducibility
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)

model = keras.models.Sequential([
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.Dense(300, activation="relu"),
    keras.layers.Dense(100, activation="relu"),
    keras.layers.Dense(10, activation="softmax")
])
model.layers
[<keras.layers.core.Flatten at 0x7fe050045510>, <keras.layers.core.Dense at 0x7fe050045650>, <keras.layers.core.Dense at 0x7fe0500459d0>, <keras.layers.core.Dense at 0x7fe050045d90>]
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= flatten (Flatten) (None, 784) 0 _________________________________________________________________ dense (Dense) (None, 300) 235500 _________________________________________________________________ dense_1 (Dense) (None, 100) 30100 _________________________________________________________________ dense_2 (Dense) (None, 10) 1010 ================================================================= Total params: 266,610 Trainable params: 266,610 Non-trainable params: 0 _________________________________________________________________
keras.utils.plot_model(model, "my_fashion_mnist_model.png", show_shapes=True)
# Layers can be fetched by index; names are auto-generated ("dense", "dense_1", ...)
hidden1 = model.layers[1]
hidden1.name
'dense'
model.get_layer(hidden1.name) is hidden1
True
# A Dense layer holds a kernel matrix and a bias vector
weights, biases = hidden1.get_weights()
weights
array([[ 0.02448617, -0.00877795, -0.02189048, ..., -0.02766046, 0.03859074, -0.06889391], [ 0.00476504, -0.03105379, -0.0586676 , ..., 0.00602964, -0.02763776, -0.04165364], [-0.06189284, -0.06901957, 0.07102345, ..., -0.04238207, 0.07121518, -0.07331658], ..., [-0.03048757, 0.02155137, -0.05400612, ..., -0.00113463, 0.00228987, 0.05581069], [ 0.07061854, -0.06960931, 0.07038955, ..., -0.00384101, 0.00034875, 0.02878492], [-0.06022581, 0.01577859, -0.02585464, ..., -0.00527829, 0.00272203, -0.06793761]], dtype=float32)
weights.shape
(784, 300)
biases
array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], dtype=float32)
biases.shape
(300,)
# sparse_categorical_crossentropy: integer class labels + softmax probabilities
model.compile(loss="sparse_categorical_crossentropy",
              optimizer="sgd",
              metrics=["accuracy"])
위 코드는 다음과 같습니다:
# Equivalent compile call using the explicit class/function objects
model.compile(loss=keras.losses.sparse_categorical_crossentropy,
              optimizer=keras.optimizers.SGD(),
              metrics=[keras.metrics.sparse_categorical_accuracy])

history = model.fit(X_train, y_train, epochs=30,
                    validation_data=(X_valid, y_valid))
Epoch 1/30 1719/1719 [==============================] - 5s 2ms/step - loss: 0.7237 - accuracy: 0.7644 - val_loss: 0.5207 - val_accuracy: 0.8234 Epoch 2/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.4843 - accuracy: 0.8318 - val_loss: 0.4345 - val_accuracy: 0.8538 Epoch 3/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.4393 - accuracy: 0.8455 - val_loss: 0.5310 - val_accuracy: 0.7986 Epoch 4/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.4126 - accuracy: 0.8566 - val_loss: 0.3918 - val_accuracy: 0.8644 Epoch 5/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3940 - accuracy: 0.8621 - val_loss: 0.3753 - val_accuracy: 0.8680 Epoch 6/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3753 - accuracy: 0.8675 - val_loss: 0.3713 - val_accuracy: 0.8724 Epoch 7/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3635 - accuracy: 0.8710 - val_loss: 0.3620 - val_accuracy: 0.8730 Epoch 8/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3520 - accuracy: 0.8749 - val_loss: 0.3853 - val_accuracy: 0.8618 Epoch 9/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3418 - accuracy: 0.8792 - val_loss: 0.3602 - val_accuracy: 0.8690 Epoch 10/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3324 - accuracy: 0.8823 - val_loss: 0.3437 - val_accuracy: 0.8764 Epoch 11/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3243 - accuracy: 0.8832 - val_loss: 0.3428 - val_accuracy: 0.8792 Epoch 12/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3151 - accuracy: 0.8867 - val_loss: 0.3311 - val_accuracy: 0.8826 Epoch 13/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3083 - accuracy: 0.8894 - val_loss: 0.3271 - val_accuracy: 0.8884 Epoch 14/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.3024 - accuracy: 0.8913 - val_loss: 0.3392 - 
val_accuracy: 0.8782 Epoch 15/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2950 - accuracy: 0.8933 - val_loss: 0.3218 - val_accuracy: 0.8854 Epoch 16/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2893 - accuracy: 0.8971 - val_loss: 0.3091 - val_accuracy: 0.8886 Epoch 17/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2841 - accuracy: 0.8975 - val_loss: 0.3532 - val_accuracy: 0.8732 Epoch 18/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2781 - accuracy: 0.8999 - val_loss: 0.3136 - val_accuracy: 0.8894 Epoch 19/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2731 - accuracy: 0.9021 - val_loss: 0.3122 - val_accuracy: 0.8898 Epoch 20/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2676 - accuracy: 0.9034 - val_loss: 0.3260 - val_accuracy: 0.8816 Epoch 21/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2626 - accuracy: 0.9059 - val_loss: 0.3046 - val_accuracy: 0.8922 Epoch 22/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2578 - accuracy: 0.9070 - val_loss: 0.2973 - val_accuracy: 0.8970 Epoch 23/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2536 - accuracy: 0.9087 - val_loss: 0.2979 - val_accuracy: 0.8930 Epoch 24/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2488 - accuracy: 0.9106 - val_loss: 0.3073 - val_accuracy: 0.8874 Epoch 25/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2445 - accuracy: 0.9117 - val_loss: 0.2969 - val_accuracy: 0.8960 Epoch 26/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2406 - accuracy: 0.9136 - val_loss: 0.3074 - val_accuracy: 0.8894 Epoch 27/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2365 - accuracy: 0.9158 - val_loss: 0.3017 - val_accuracy: 0.8946 Epoch 28/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2330 - 
accuracy: 0.9163 - val_loss: 0.3000 - val_accuracy: 0.8928 Epoch 29/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2287 - accuracy: 0.9180 - val_loss: 0.3042 - val_accuracy: 0.8896 Epoch 30/30 1719/1719 [==============================] - 3s 2ms/step - loss: 0.2253 - accuracy: 0.9190 - val_loss: 0.3041 - val_accuracy: 0.8932
history.params
{'epochs': 30, 'steps': 1719, 'verbose': 1}
print(history.epoch)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29]
history.history.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
import pandas as pd

# Learning curves: per-epoch loss/accuracy for training and validation
pd.DataFrame(history.history).plot(figsize=(8, 5))
plt.grid(True)
plt.gca().set_ylim(0, 1)  # clamp the vertical axis to [0, 1]
save_fig("keras_learning_curves_plot")
plt.show()
그림 저장: keras_learning_curves_plot
model.evaluate(X_test, y_test)
313/313 [==============================] - 1s 2ms/step - loss: 0.3386 - accuracy: 0.8823
[0.3386382460594177, 0.8823000192642212]
# Predict class probabilities for the first 3 test images
X_new = X_test[:3]
y_proba = model.predict(X_new)
y_proba.round(2)
array([[0. , 0. , 0. , 0. , 0. , 0.01, 0. , 0.03, 0. , 0.96], [0. , 0. , 0.99, 0. , 0.01, 0. , 0. , 0. , 0. , 0. ], [0. , 1. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ]], dtype=float32)
경고: `model.predict_classes(X_new)`는 삭제될 예정입니다. 대신 `np.argmax(model.predict(X_new), axis=-1)`를 사용하세요.
# y_pred = model.predict_classes(X_new)  # deprecated; removed in later Keras
# Take the most probable class per sample instead
y_pred = np.argmax(model.predict(X_new), axis=-1)
y_pred
array([9, 2, 1])
np.array(class_names)[y_pred]
array(['Ankle boot', 'Pullover', 'Trouser'], dtype='<U11')
y_new = y_test[:3]
y_new
array([9, 2, 1], dtype=uint8)
# Show the 3 test images with their true class names as titles
plt.figure(figsize=(7.2, 2.4))
for index, image in enumerate(X_new):
    plt.subplot(1, 3, index + 1)
    plt.imshow(image, cmap="binary", interpolation="nearest")
    plt.axis('off')
    plt.title(class_names[y_test[index]], fontsize=12)
plt.subplots_adjust(wspace=0.2, hspace=0.5)
save_fig('fashion_mnist_images_plot', tight_layout=False)
plt.show()
그림 저장: fashion_mnist_images_plot
캘리포니아 주택 데이터셋을 로드하여 나누고 스케일을 바꾸어 보겠습니다(2장에서 사용한 수정된 버전이 아니라 원본을 사용합니다):
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Load the original California housing data and split into train/valid/test
housing = fetch_california_housing()
X_train_full, X_test, y_train_full, y_test = train_test_split(housing.data, housing.target, random_state=42)
X_train, X_valid, y_train, y_valid = train_test_split(X_train_full, y_train_full, random_state=42)

# Fit the scaler on the training set only, then apply it to every split
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_valid = scaler.transform(X_valid)
X_test = scaler.transform(X_test)
Downloading Cal. housing from https://ndownloader.figshare.com/files/5976036 to /root/scikit_learn_data
np.random.seed(42)
tf.random.set_seed(42)

# Regression MLP: one hidden layer, single linear output unit (no activation)
model = keras.models.Sequential([
    keras.layers.Dense(30, activation="relu", input_shape=X_train.shape[1:]),
    keras.layers.Dense(1)
])
model.compile(loss="mean_squared_error", optimizer=keras.optimizers.SGD(learning_rate=1e-3))
history = model.fit(X_train, y_train, epochs=20, validation_data=(X_valid, y_valid))
mse_test = model.evaluate(X_test, y_test)
X_new = X_test[:3]  # pretend these are new, unseen districts
y_pred = model.predict(X_new)
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
Epoch 1/20 363/363 [==============================] - 1s 2ms/step - loss: 1.6419 - val_loss: 0.8560 Epoch 2/20 363/363 [==============================] - 1s 2ms/step - loss: 0.7047 - val_loss: 0.6531 Epoch 3/20 363/363 [==============================] - 1s 2ms/step - loss: 0.6345 - val_loss: 0.6099 Epoch 4/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5977 - val_loss: 0.5658 Epoch 5/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5706 - val_loss: 0.5355 Epoch 6/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5472 - val_loss: 0.5173 Epoch 7/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5288 - val_loss: 0.5081 Epoch 8/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5130 - val_loss: 0.4799 Epoch 9/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4992 - val_loss: 0.4690 Epoch 10/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4875 - val_loss: 0.4656 Epoch 11/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4777 - val_loss: 0.4482 Epoch 12/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4688 - val_loss: 0.4479 Epoch 13/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4615 - val_loss: 0.4296 Epoch 14/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4547 - val_loss: 0.4233 Epoch 15/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4488 - val_loss: 0.4176 Epoch 16/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4435 - val_loss: 0.4123 Epoch 17/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4389 - val_loss: 0.4071 Epoch 18/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4347 - val_loss: 0.4037 Epoch 19/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4306 - val_loss: 0.4000 Epoch 20/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4273 - 
val_loss: 0.3969 162/162 [==============================] - 0s 1ms/step - loss: 0.4212
# Plot the training/validation loss curves for the regression model
plt.plot(pd.DataFrame(history.history))
plt.grid(True)
plt.gca().set_ylim(0, 1)
plt.show()

y_pred
array([[0.38856643], [1.6792021 ], [3.1022797 ]], dtype=float32)
모든 신경망 모델이 단순하게 순서대로 나열되지는 않습니다. 어떤 신경망은 매우 복잡한 구조를 가집니다. 여러 개의 입력이 있거나 여러 개의 출력이 있습니다. 예를 들어 와이드 & 딥 신경망(논문 참조)은 입력의 전체 또는 일부를 출력층에 바로 연결합니다.
np.random.seed(42)
tf.random.set_seed(42)

# Wide & Deep model via the functional API: the raw input is concatenated
# with the deep path's output right before the final layer
input_ = keras.layers.Input(shape=X_train.shape[1:])
hidden1 = keras.layers.Dense(30, activation="relu")(input_)
hidden2 = keras.layers.Dense(30, activation="relu")(hidden1)
concat = keras.layers.concatenate([input_, hidden2])
output = keras.layers.Dense(1)(concat)
model = keras.models.Model(inputs=[input_], outputs=[output])
model.summary()
Model: "model" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_1 (InputLayer) [(None, 8)] 0 __________________________________________________________________________________________________ dense_5 (Dense) (None, 30) 270 input_1[0][0] __________________________________________________________________________________________________ dense_6 (Dense) (None, 30) 930 dense_5[0][0] __________________________________________________________________________________________________ concatenate (Concatenate) (None, 38) 0 input_1[0][0] dense_6[0][0] __________________________________________________________________________________________________ dense_7 (Dense) (None, 1) 39 concatenate[0][0] ================================================================================================== Total params: 1,239 Trainable params: 1,239 Non-trainable params: 0 __________________________________________________________________________________________________
# Train and evaluate the functional wide & deep model
model.compile(loss="mean_squared_error", optimizer=keras.optimizers.SGD(learning_rate=1e-3))
history = model.fit(X_train, y_train, epochs=20,
                    validation_data=(X_valid, y_valid))
mse_test = model.evaluate(X_test, y_test)
y_pred = model.predict(X_new)
Epoch 1/20
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
363/363 [==============================] - 1s 2ms/step - loss: 1.2611 - val_loss: 3.3940 Epoch 2/20 363/363 [==============================] - 1s 2ms/step - loss: 0.6580 - val_loss: 0.9360 Epoch 3/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5878 - val_loss: 0.5649 Epoch 4/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5582 - val_loss: 0.5712 Epoch 5/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5347 - val_loss: 0.5045 Epoch 6/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5158 - val_loss: 0.4831 Epoch 7/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5002 - val_loss: 0.4639 Epoch 8/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4876 - val_loss: 0.4638 Epoch 9/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4760 - val_loss: 0.4421 Epoch 10/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4659 - val_loss: 0.4313 Epoch 11/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4577 - val_loss: 0.4345 Epoch 12/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4498 - val_loss: 0.4168 Epoch 13/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4428 - val_loss: 0.4230 Epoch 14/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4366 - val_loss: 0.4047 Epoch 15/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4307 - val_loss: 0.4078 Epoch 16/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4257 - val_loss: 0.3938 Epoch 17/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4210 - val_loss: 0.3952 Epoch 18/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4167 - val_loss: 0.3860 Epoch 19/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4121 - val_loss: 0.3827 Epoch 20/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4088 - val_loss: 0.4054 
162/162 [==============================] - 0s 1ms/step - loss: 0.4032
와이드나 딥 경로에 다른 입력 특성을 전달하면 어떻게 될까요? (특성 0에서 4까지) 5개의 특성을 와이드 경로에 보내고 (특성 2에서 7까지) 6개의 특성을 딥 경로에 전달하겠습니다. 3개의 특성(특성 2, 3, 4)은 양쪽에 모두 전달됩니다.
np.random.seed(42)
tf.random.set_seed(42)

# Two inputs: features 0-4 feed the wide path, features 2-7 feed the deep
# path (columns 2-4 go to both)
input_A = keras.layers.Input(shape=[5], name="wide_input")
input_B = keras.layers.Input(shape=[6], name="deep_input")
hidden1 = keras.layers.Dense(30, activation="relu")(input_B)
hidden2 = keras.layers.Dense(30, activation="relu")(hidden1)
concat = keras.layers.concatenate([input_A, hidden2])
output = keras.layers.Dense(1, name="output")(concat)
model = keras.models.Model(inputs=[input_A, input_B], outputs=[output])

model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate=1e-3))

# Split each dataset into its wide (first 5) and deep (last 6) feature columns
X_train_A, X_train_B = X_train[:, :5], X_train[:, 2:]
X_valid_A, X_valid_B = X_valid[:, :5], X_valid[:, 2:]
X_test_A, X_test_B = X_test[:, :5], X_test[:, 2:]
X_new_A, X_new_B = X_test_A[:3], X_test_B[:3]

# fit/evaluate/predict take one array per named input
history = model.fit((X_train_A, X_train_B), y_train, epochs=20,
                    validation_data=((X_valid_A, X_valid_B), y_valid))
mse_test = model.evaluate((X_test_A, X_test_B), y_test)
y_pred = model.predict((X_new_A, X_new_B))
Epoch 1/20
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
363/363 [==============================] - 1s 2ms/step - loss: 1.8145 - val_loss: 0.8072 Epoch 2/20 363/363 [==============================] - 1s 2ms/step - loss: 0.6771 - val_loss: 0.6658 Epoch 3/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5979 - val_loss: 0.5687 Epoch 4/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5584 - val_loss: 0.5296 Epoch 5/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5334 - val_loss: 0.4993 Epoch 6/20 363/363 [==============================] - 1s 2ms/step - loss: 0.5120 - val_loss: 0.4811 Epoch 7/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4970 - val_loss: 0.4696 Epoch 8/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4843 - val_loss: 0.4496 Epoch 9/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4730 - val_loss: 0.4404 Epoch 10/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4644 - val_loss: 0.4315 Epoch 11/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4570 - val_loss: 0.4268 Epoch 12/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4510 - val_loss: 0.4166 Epoch 13/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4462 - val_loss: 0.4125 Epoch 14/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4421 - val_loss: 0.4074 Epoch 15/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4385 - val_loss: 0.4044 Epoch 16/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4356 - val_loss: 0.4007 Epoch 17/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4322 - val_loss: 0.4013 Epoch 18/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4305 - val_loss: 0.3987 Epoch 19/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4274 - val_loss: 0.3934 Epoch 20/20 363/363 [==============================] - 1s 2ms/step - loss: 0.4261 - val_loss: 0.4204 
162/162 [==============================] - 0s 1ms/step - loss: 0.4219
규제를 위한 보조 출력 추가하기:
np.random.seed(42)
tf.random.set_seed(42)

# Same wide & deep model plus an auxiliary output attached to the last
# hidden layer, used for regularization
input_A = keras.layers.Input(shape=[5], name="wide_input")
input_B = keras.layers.Input(shape=[6], name="deep_input")
hidden1 = keras.layers.Dense(30, activation="relu")(input_B)
hidden2 = keras.layers.Dense(30, activation="relu")(hidden1)
concat = keras.layers.concatenate([input_A, hidden2])
output = keras.layers.Dense(1, name="main_output")(concat)
aux_output = keras.layers.Dense(1, name="aux_output")(hidden2)
model = keras.models.Model(inputs=[input_A, input_B],
                           outputs=[output, aux_output])

# One "mse" loss per output; the main output dominates (weights 90% / 10%)
model.compile(loss=["mse", "mse"], loss_weights=[0.9, 0.1], optimizer=keras.optimizers.SGD(learning_rate=1e-3))
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
# Both outputs are trained against the same labels
history = model.fit([X_train_A, X_train_B], [y_train, y_train], epochs=20,
                    validation_data=([X_valid_A, X_valid_B], [y_valid, y_valid]))
Epoch 1/20 363/363 [==============================] - 2s 4ms/step - loss: 2.1365 - main_output_loss: 1.9196 - aux_output_loss: 4.0890 - val_loss: 1.6233 - val_main_output_loss: 0.8468 - val_aux_output_loss: 8.6117 Epoch 2/20 363/363 [==============================] - 1s 3ms/step - loss: 0.8905 - main_output_loss: 0.6969 - aux_output_loss: 2.6326 - val_loss: 1.5163 - val_main_output_loss: 0.6836 - val_aux_output_loss: 9.0109 Epoch 3/20 363/363 [==============================] - 1s 3ms/step - loss: 0.7429 - main_output_loss: 0.6088 - aux_output_loss: 1.9499 - val_loss: 1.4639 - val_main_output_loss: 0.6229 - val_aux_output_loss: 9.0326 Epoch 4/20 363/363 [==============================] - 1s 3ms/step - loss: 0.6771 - main_output_loss: 0.5691 - aux_output_loss: 1.6485 - val_loss: 1.3388 - val_main_output_loss: 0.5481 - val_aux_output_loss: 8.4552 Epoch 5/20 363/363 [==============================] - 1s 3ms/step - loss: 0.6381 - main_output_loss: 0.5434 - aux_output_loss: 1.4911 - val_loss: 1.2177 - val_main_output_loss: 0.5194 - val_aux_output_loss: 7.5030 Epoch 6/20 363/363 [==============================] - 1s 3ms/step - loss: 0.6079 - main_output_loss: 0.5207 - aux_output_loss: 1.3923 - val_loss: 1.0935 - val_main_output_loss: 0.5106 - val_aux_output_loss: 6.3396 Epoch 7/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5853 - main_output_loss: 0.5040 - aux_output_loss: 1.3175 - val_loss: 0.9918 - val_main_output_loss: 0.5115 - val_aux_output_loss: 5.3151 Epoch 8/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5666 - main_output_loss: 0.4898 - aux_output_loss: 1.2572 - val_loss: 0.8733 - val_main_output_loss: 0.4733 - val_aux_output_loss: 4.4740 Epoch 9/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5504 - main_output_loss: 0.4771 - aux_output_loss: 1.2101 - val_loss: 0.7832 - val_main_output_loss: 0.4555 - val_aux_output_loss: 3.7323 Epoch 10/20 363/363 [==============================] - 1s 3ms/step - 
loss: 0.5373 - main_output_loss: 0.4671 - aux_output_loss: 1.1695 - val_loss: 0.7170 - val_main_output_loss: 0.4604 - val_aux_output_loss: 3.0262 Epoch 11/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5266 - main_output_loss: 0.4591 - aux_output_loss: 1.1344 - val_loss: 0.6510 - val_main_output_loss: 0.4293 - val_aux_output_loss: 2.6468 Epoch 12/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5173 - main_output_loss: 0.4520 - aux_output_loss: 1.1048 - val_loss: 0.6051 - val_main_output_loss: 0.4310 - val_aux_output_loss: 2.1722 Epoch 13/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5095 - main_output_loss: 0.4465 - aux_output_loss: 1.0765 - val_loss: 0.5644 - val_main_output_loss: 0.4161 - val_aux_output_loss: 1.8992 Epoch 14/20 363/363 [==============================] - 1s 3ms/step - loss: 0.5027 - main_output_loss: 0.4417 - aux_output_loss: 1.0511 - val_loss: 0.5354 - val_main_output_loss: 0.4119 - val_aux_output_loss: 1.6466 Epoch 15/20 363/363 [==============================] - 1s 3ms/step - loss: 0.4967 - main_output_loss: 0.4376 - aux_output_loss: 1.0280 - val_loss: 0.5124 - val_main_output_loss: 0.4047 - val_aux_output_loss: 1.4812 Epoch 16/20 363/363 [==============================] - 1s 3ms/step - loss: 0.4916 - main_output_loss: 0.4343 - aux_output_loss: 1.0070 - val_loss: 0.4934 - val_main_output_loss: 0.4034 - val_aux_output_loss: 1.3035 Epoch 17/20 363/363 [==============================] - 1s 3ms/step - loss: 0.4867 - main_output_loss: 0.4311 - aux_output_loss: 0.9872 - val_loss: 0.4801 - val_main_output_loss: 0.3984 - val_aux_output_loss: 1.2150 Epoch 18/20 363/363 [==============================] - 1s 4ms/step - loss: 0.4829 - main_output_loss: 0.4289 - aux_output_loss: 0.9686 - val_loss: 0.4694 - val_main_output_loss: 0.3962 - val_aux_output_loss: 1.1279 Epoch 19/20 363/363 [==============================] - 1s 3ms/step - loss: 0.4785 - main_output_loss: 0.4260 - aux_output_loss: 
0.9510 - val_loss: 0.4580 - val_main_output_loss: 0.3936 - val_aux_output_loss: 1.0372 Epoch 20/20 363/363 [==============================] - 1s 3ms/step - loss: 0.4756 - main_output_loss: 0.4246 - aux_output_loss: 0.9344 - val_loss: 0.4655 - val_main_output_loss: 0.4048 - val_aux_output_loss: 1.0118
# evaluate() returns the weighted total loss plus one loss per output
total_loss, main_loss, aux_loss = model.evaluate(
    [X_test_A, X_test_B], [y_test, y_test])
y_pred_main, y_pred_aux = model.predict([X_new_A, X_new_B])
162/162 [==============================] - 0s 2ms/step - loss: 0.4668 - main_output_loss: 0.4178 - aux_output_loss: 0.9082 WARNING:tensorflow:5 out of the last 6 calls to <function Model.make_predict_function.<locals>.predict_function at 0x7fe061a46560> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has experimental_relax_shapes=True option that relaxes argument shapes that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.
class WideAndDeepModel(keras.models.Model):
    """Wide & Deep regression model built via Keras model subclassing.

    The "wide" input (first element of `inputs`) is concatenated with the
    result of the "deep" path (two Dense layers over the second input)
    before the main head. An auxiliary head reads the deep path directly
    and is typically used as a regularizer.
    """

    def __init__(self, units=30, activation="relu", **kwargs):
        super().__init__(**kwargs)  # handles standard args such as `name`
        self.hidden1 = keras.layers.Dense(units, activation=activation)
        self.hidden2 = keras.layers.Dense(units, activation=activation)
        self.main_output = keras.layers.Dense(1)
        self.aux_output = keras.layers.Dense(1)

    def call(self, inputs):
        # `inputs` is a pair: (wide input A, deep input B).
        wide_input, deep_input = inputs
        deep = self.hidden2(self.hidden1(deep_input))
        merged = keras.layers.concatenate([wide_input, deep])
        return self.main_output(merged), self.aux_output(deep)
# Build and train the subclassed Wide & Deep model. The auxiliary output
# gets a small loss weight (0.1) so it mostly acts as a regularizer.
model = WideAndDeepModel(30, activation="relu")
sgd = keras.optimizers.SGD(learning_rate=1e-3)
model.compile(loss="mse", loss_weights=[0.9, 0.1], optimizer=sgd)

history = model.fit((X_train_A, X_train_B), (y_train, y_train), epochs=10,
                    validation_data=((X_valid_A, X_valid_B), (y_valid, y_valid)))
total_loss, main_loss, aux_loss = model.evaluate((X_test_A, X_test_B), (y_test, y_test))
y_pred_main, y_pred_aux = model.predict((X_new_A, X_new_B))
Epoch 1/10
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
363/363 [==============================] - 2s 3ms/step - loss: 2.3298 - output_1_loss: 2.2186 - output_2_loss: 3.3304 - val_loss: 2.1435 - val_output_1_loss: 1.1581 - val_output_2_loss: 11.0117 Epoch 2/10 363/363 [==============================] - 1s 3ms/step - loss: 0.9714 - output_1_loss: 0.8543 - output_2_loss: 2.0252 - val_loss: 1.7567 - val_output_1_loss: 0.8205 - val_output_2_loss: 10.1825 Epoch 3/10 363/363 [==============================] - 1s 3ms/step - loss: 0.8268 - output_1_loss: 0.7289 - output_2_loss: 1.7082 - val_loss: 1.5664 - val_output_1_loss: 0.7913 - val_output_2_loss: 8.5419 Epoch 4/10 363/363 [==============================] - 1s 3ms/step - loss: 0.7636 - output_1_loss: 0.6764 - output_2_loss: 1.5477 - val_loss: 1.3088 - val_output_1_loss: 0.6549 - val_output_2_loss: 7.1933 Epoch 5/10 363/363 [==============================] - 1s 3ms/step - loss: 0.7211 - output_1_loss: 0.6402 - output_2_loss: 1.4489 - val_loss: 1.1357 - val_output_1_loss: 0.5964 - val_output_2_loss: 5.9898 Epoch 6/10 363/363 [==============================] - 1s 3ms/step - loss: 0.6895 - output_1_loss: 0.6124 - output_2_loss: 1.3833 - val_loss: 1.0036 - val_output_1_loss: 0.5937 - val_output_2_loss: 4.6933 Epoch 7/10 363/363 [==============================] - 1s 3ms/step - loss: 0.6632 - output_1_loss: 0.5894 - output_2_loss: 1.3274 - val_loss: 0.8904 - val_output_1_loss: 0.5591 - val_output_2_loss: 3.8714 Epoch 8/10 363/363 [==============================] - 1s 3ms/step - loss: 0.6410 - output_1_loss: 0.5701 - output_2_loss: 1.2796 - val_loss: 0.8009 - val_output_1_loss: 0.5243 - val_output_2_loss: 3.2903 Epoch 9/10 363/363 [==============================] - 1s 3ms/step - loss: 0.6204 - output_1_loss: 0.5514 - output_2_loss: 1.2416 - val_loss: 0.7357 - val_output_1_loss: 0.5144 - val_output_2_loss: 2.7275 Epoch 10/10 363/363 [==============================] - 1s 3ms/step - loss: 0.6024 - output_1_loss: 0.5355 - output_2_loss: 1.2043 - val_loss: 0.6849 - val_output_1_loss: 
0.5014 - val_output_2_loss: 2.3370 162/162 [==============================] - 0s 2ms/step - loss: 0.5841 - output_1_loss: 0.5188 - output_2_loss: 1.1722 WARNING:tensorflow:6 out of the last 7 calls to <function Model.make_predict_function.<locals>.predict_function at 0x7fe0105d0680> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has experimental_relax_shapes=True option that relaxes argument shapes that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.
# Reseed the RNGs so results are reproducible across notebook runs.
np.random.seed(42)
tf.random.set_seed(42)

# Plain regression MLP: two hidden layers of 30 ReLU units, one linear
# output unit (California housing has 8 input features).
model = keras.models.Sequential()
model.add(keras.layers.Dense(30, activation="relu", input_shape=[8]))
model.add(keras.layers.Dense(30, activation="relu"))
model.add(keras.layers.Dense(1))
model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate=1e-3))

history = model.fit(X_train, y_train, epochs=10, validation_data=(X_valid, y_valid))
mse_test = model.evaluate(X_test, y_test)
Epoch 1/10
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
363/363 [==============================] - 1s 2ms/step - loss: 1.8866 - val_loss: 0.7126 Epoch 2/10 363/363 [==============================] - 1s 2ms/step - loss: 0.6577 - val_loss: 0.6880 Epoch 3/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5934 - val_loss: 0.5803 Epoch 4/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5557 - val_loss: 0.5166 Epoch 5/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5272 - val_loss: 0.4895 Epoch 6/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5033 - val_loss: 0.4951 Epoch 7/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4854 - val_loss: 0.4861 Epoch 8/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4709 - val_loss: 0.4554 Epoch 9/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4578 - val_loss: 0.4413 Epoch 10/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4474 - val_loss: 0.4379 162/162 [==============================] - 0s 1ms/step - loss: 0.4382
# Persist the whole model (architecture + weights + optimizer state) in
# HDF5 format, reload it, then sanity-check a prediction.
model_path = "my_keras_model.h5"
model.save(model_path)
model = keras.models.load_model(model_path)
model.predict(X_new)
array([[0.54002357], [1.6505971 ], [3.009824 ]], dtype=float32)
# Save and restore only the weights (TF checkpoint format) — the
# architecture itself is not stored this way.
weights_path = "my_keras_weights.ckpt"
model.save_weights(weights_path)
model.load_weights(weights_path)
<tensorflow.python.training.tracking.util.CheckpointLoadStatus at 0x7fe063867690>
# Fresh session + fixed seeds for a reproducible run.
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)

# Rebuild the same regression MLP from scratch.
model = keras.models.Sequential()
model.add(keras.layers.Dense(30, activation="relu", input_shape=[8]))
model.add(keras.layers.Dense(30, activation="relu"))
model.add(keras.layers.Dense(1))
model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate=1e-3))

# Checkpoint after every epoch, but keep only the version with the
# lowest validation loss on disk.
checkpoint_cb = keras.callbacks.ModelCheckpoint("my_keras_model.h5", save_best_only=True)
history = model.fit(X_train, y_train, epochs=10,
                    validation_data=(X_valid, y_valid),
                    callbacks=[checkpoint_cb])
model = keras.models.load_model("my_keras_model.h5")  # roll back to the best model
mse_test = model.evaluate(X_test, y_test)
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
Epoch 1/10 363/363 [==============================] - 1s 2ms/step - loss: 1.8866 - val_loss: 0.7126 Epoch 2/10 363/363 [==============================] - 1s 2ms/step - loss: 0.6577 - val_loss: 0.6880 Epoch 3/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5934 - val_loss: 0.5803 Epoch 4/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5557 - val_loss: 0.5166 Epoch 5/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5272 - val_loss: 0.4895 Epoch 6/10 363/363 [==============================] - 1s 2ms/step - loss: 0.5033 - val_loss: 0.4951 Epoch 7/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4854 - val_loss: 0.4861 Epoch 8/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4709 - val_loss: 0.4554 Epoch 9/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4578 - val_loss: 0.4413 Epoch 10/10 363/363 [==============================] - 1s 2ms/step - loss: 0.4474 - val_loss: 0.4379 162/162 [==============================] - 0s 1ms/step - loss: 0.4382
model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate=1e-3))

# Stop once validation loss has not improved for 10 consecutive epochs,
# and restore the weights from the best epoch when stopping. With early
# stopping in place, a large epoch budget (100) is safe.
early_stopping_cb = keras.callbacks.EarlyStopping(patience=10,
                                                  restore_best_weights=True)
history = model.fit(X_train, y_train, epochs=100,
                    validation_data=(X_valid, y_valid),
                    callbacks=[checkpoint_cb, early_stopping_cb])
mse_test = model.evaluate(X_test, y_test)
Epoch 1/100
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
363/363 [==============================] - 1s 2ms/step - loss: 0.4393 - val_loss: 0.4110 Epoch 2/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4315 - val_loss: 0.4266 Epoch 3/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4259 - val_loss: 0.3996 Epoch 4/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4201 - val_loss: 0.3939 Epoch 5/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4154 - val_loss: 0.3889 Epoch 6/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4111 - val_loss: 0.3866 Epoch 7/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4074 - val_loss: 0.3860 Epoch 8/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4040 - val_loss: 0.3793 Epoch 9/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4008 - val_loss: 0.3746 Epoch 10/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3976 - val_loss: 0.3723 Epoch 11/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3950 - val_loss: 0.3697 Epoch 12/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3923 - val_loss: 0.3669 Epoch 13/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3897 - val_loss: 0.3661 Epoch 14/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3874 - val_loss: 0.3631 Epoch 15/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3851 - val_loss: 0.3660 Epoch 16/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3829 - val_loss: 0.3625 Epoch 17/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3810 - val_loss: 0.3592 Epoch 18/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3788 - val_loss: 0.3563 Epoch 19/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3766 - val_loss: 0.3535 Epoch 20/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3751 
- val_loss: 0.3709 Epoch 21/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3732 - val_loss: 0.3512 Epoch 22/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3715 - val_loss: 0.3699 Epoch 23/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3700 - val_loss: 0.3476 Epoch 24/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3685 - val_loss: 0.3561 Epoch 25/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3671 - val_loss: 0.3527 Epoch 26/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3658 - val_loss: 0.3701 Epoch 27/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3647 - val_loss: 0.3432 Epoch 28/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3635 - val_loss: 0.3592 Epoch 29/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3625 - val_loss: 0.3521 Epoch 30/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3613 - val_loss: 0.3626 Epoch 31/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3601 - val_loss: 0.3431 Epoch 32/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3589 - val_loss: 0.3766 Epoch 33/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3584 - val_loss: 0.3374 Epoch 34/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3572 - val_loss: 0.3407 Epoch 35/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3563 - val_loss: 0.3614 Epoch 36/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3555 - val_loss: 0.3348 Epoch 37/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3546 - val_loss: 0.3573 Epoch 38/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3538 - val_loss: 0.3367 Epoch 39/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3530 - val_loss: 0.3425 Epoch 40/100 363/363 
[==============================] - 1s 2ms/step - loss: 0.3523 - val_loss: 0.3369 Epoch 41/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3515 - val_loss: 0.3514 Epoch 42/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3511 - val_loss: 0.3427 Epoch 43/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3500 - val_loss: 0.3679 Epoch 44/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3496 - val_loss: 0.3562 Epoch 45/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3490 - val_loss: 0.3336 Epoch 46/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3481 - val_loss: 0.3456 Epoch 47/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3478 - val_loss: 0.3433 Epoch 48/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3471 - val_loss: 0.3658 Epoch 49/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3466 - val_loss: 0.3286 Epoch 50/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3460 - val_loss: 0.3268 Epoch 51/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3454 - val_loss: 0.3439 Epoch 52/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3449 - val_loss: 0.3263 Epoch 53/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3444 - val_loss: 0.3911 Epoch 54/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3439 - val_loss: 0.3275 Epoch 55/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3435 - val_loss: 0.3561 Epoch 56/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3430 - val_loss: 0.3237 Epoch 57/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3423 - val_loss: 0.3242 Epoch 58/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3419 - val_loss: 0.3764 Epoch 59/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3417 
- val_loss: 0.3289 Epoch 60/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3410 - val_loss: 0.3502 Epoch 61/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3404 - val_loss: 0.3457 Epoch 62/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3402 - val_loss: 0.3444 Epoch 63/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3392 - val_loss: 0.3290 Epoch 64/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3393 - val_loss: 0.3217 Epoch 65/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3387 - val_loss: 0.3351 Epoch 66/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3383 - val_loss: 0.3232 Epoch 67/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3376 - val_loss: 0.3567 Epoch 68/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3375 - val_loss: 0.3257 Epoch 69/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3370 - val_loss: 0.3348 Epoch 70/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3365 - val_loss: 0.3560 Epoch 71/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3361 - val_loss: 0.3582 Epoch 72/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3357 - val_loss: 0.3287 Epoch 73/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3351 - val_loss: 0.3203 Epoch 74/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3350 - val_loss: 0.3839 Epoch 75/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3347 - val_loss: 0.3234 Epoch 76/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3342 - val_loss: 0.3475 Epoch 77/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3338 - val_loss: 0.3408 Epoch 78/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3335 - val_loss: 0.3462 Epoch 79/100 363/363 
[==============================] - 1s 2ms/step - loss: 0.3332 - val_loss: 0.3348 Epoch 80/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3329 - val_loss: 0.3355 Epoch 81/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3324 - val_loss: 0.3273 Epoch 82/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3320 - val_loss: 0.3166 Epoch 83/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3317 - val_loss: 0.3280 Epoch 84/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3312 - val_loss: 0.3633 Epoch 85/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3310 - val_loss: 0.3175 Epoch 86/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3308 - val_loss: 0.3156 Epoch 87/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3305 - val_loss: 0.3528 Epoch 88/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3299 - val_loss: 0.3257 Epoch 89/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3294 - val_loss: 0.3627 Epoch 90/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3296 - val_loss: 0.3379 Epoch 91/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3291 - val_loss: 0.3210 Epoch 92/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3287 - val_loss: 0.3457 Epoch 93/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3285 - val_loss: 0.3157 Epoch 94/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3281 - val_loss: 0.3409 Epoch 95/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3276 - val_loss: 0.3379 Epoch 96/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3273 - val_loss: 0.3213 162/162 [==============================] - 0s 1ms/step - loss: 0.3310
class PrintValTrainRatioCallback(keras.callbacks.Callback):
    """Custom callback printing the val_loss / loss ratio after each epoch.

    A ratio well above 1.0 suggests overfitting; a ratio near 1.0 means
    the model generalizes about as well as it fits the training set.
    """

    def on_epoch_end(self, epoch, logs=None):
        # The Keras Callback API declares `logs=None`; the original
        # signature omitted the default and would crash if Keras (or a
        # manual call) passed no logs, or logs without both keys.
        logs = logs or {}
        if "loss" in logs and "val_loss" in logs:
            print("\nval/train: {:.2f}".format(logs["val_loss"] / logs["loss"]))
# Train one epoch with the custom ratio-printing callback attached.
val_train_ratio_cb = PrintValTrainRatioCallback()
history = model.fit(X_train, y_train, epochs=1,
                    validation_data=(X_valid, y_valid),
                    callbacks=[val_train_ratio_cb])
363/363 [==============================] - 1s 2ms/step - loss: 0.3302 - val_loss: 0.3557 val/train: 1.08
# All TensorBoard runs live under ./my_logs, one timestamped
# subdirectory per run so different runs can be compared side by side.
root_logdir = os.path.join(os.curdir, "my_logs")

def get_run_logdir():
    """Return a new, timestamped log-directory path under root_logdir."""
    import time
    stamp = time.strftime("run_%Y_%m_%d-%H_%M_%S")
    return os.path.join(root_logdir, stamp)

run_logdir = get_run_logdir()
run_logdir
'./my_logs/run_2021_08_22-22_52_19'
# Fresh session + fixed seeds so this TensorBoard run starts from scratch.
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)

# Same MLP architecture as the earlier runs.
model = keras.models.Sequential()
model.add(keras.layers.Dense(30, activation="relu", input_shape=[8]))
model.add(keras.layers.Dense(30, activation="relu"))
model.add(keras.layers.Dense(1))
model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate=1e-3))
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
# Log training curves (and the graph) to run_logdir for TensorBoard.
tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)
history = model.fit(X_train, y_train, epochs=30,
                    validation_data=(X_valid, y_valid),
                    callbacks=[checkpoint_cb, tensorboard_cb])
Epoch 1/30 363/363 [==============================] - 1s 3ms/step - loss: 1.8866 - val_loss: 0.7126 Epoch 2/30 363/363 [==============================] - 1s 2ms/step - loss: 0.6577 - val_loss: 0.6880 Epoch 3/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5934 - val_loss: 0.5803 Epoch 4/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5557 - val_loss: 0.5166 Epoch 5/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5272 - val_loss: 0.4895 Epoch 6/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5033 - val_loss: 0.4951 Epoch 7/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4854 - val_loss: 0.4861 Epoch 8/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4709 - val_loss: 0.4554 Epoch 9/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4578 - val_loss: 0.4413 Epoch 10/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4474 - val_loss: 0.4379 Epoch 11/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4393 - val_loss: 0.4396 Epoch 12/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4318 - val_loss: 0.4507 Epoch 13/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4261 - val_loss: 0.3997 Epoch 14/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4202 - val_loss: 0.3956 Epoch 15/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4155 - val_loss: 0.3916 Epoch 16/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4112 - val_loss: 0.3937 Epoch 17/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4077 - val_loss: 0.3809 Epoch 18/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4040 - val_loss: 0.3794 Epoch 19/30 363/363 [==============================] - 1s 2ms/step - loss: 0.4005 - val_loss: 0.3850 Epoch 20/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3980 - 
val_loss: 0.3809 Epoch 21/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3949 - val_loss: 0.3701 Epoch 22/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3924 - val_loss: 0.3781 Epoch 23/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3898 - val_loss: 0.3650 Epoch 24/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3874 - val_loss: 0.3655 Epoch 25/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3851 - val_loss: 0.3611 Epoch 26/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3829 - val_loss: 0.3626 Epoch 27/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3809 - val_loss: 0.3564 Epoch 28/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3788 - val_loss: 0.3579 Epoch 29/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3769 - val_loss: 0.3560 Epoch 30/30 363/363 [==============================] - 1s 2ms/step - loss: 0.3750 - val_loss: 0.3548
텐서보드 서버를 실행하는 한 가지 방법은 터미널에서 직접 실행하는 것입니다. 터미널을 열고 텐서보드가 설치된 가상 환경을 활성화합니다. 그다음 노트북 디렉토리로 이동하여 다음 명령을 입력하세요:
$ tensorboard --logdir=./my_logs --port=6006
그다음 웹 브라우저를 열고 localhost:6006에 접속하면 텐서보드를 사용할 수 있습니다. 사용이 끝나면 터미널에서 Ctrl-C를 눌러 텐서보드 서버를 종료하세요.
또는 다음처럼 텐서보드의 주피터 확장을 사용할 수 있습니다(이 명령은 텐서보드가 로컬 컴퓨터에 설치되어 있어야 합니다):
%load_ext tensorboard
%tensorboard --logdir=./my_logs --port=6006
# A second run directory, so both runs show up side by side in TensorBoard.
run_logdir2 = get_run_logdir()
run_logdir2
'./my_logs/run_2021_08_22-22_53_04'
# Fresh session + fixed seeds for the second TensorBoard run.
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)

# Identical architecture, but a much larger learning rate (0.05 instead
# of 1e-3) so the two runs' learning curves can be contrasted.
model = keras.models.Sequential()
model.add(keras.layers.Dense(30, activation="relu", input_shape=[8]))
model.add(keras.layers.Dense(30, activation="relu"))
model.add(keras.layers.Dense(1))
model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate=0.05))
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
# Second TensorBoard run: log to the new run directory.
tensorboard_cb = keras.callbacks.TensorBoard(run_logdir2)
history = model.fit(X_train, y_train, epochs=30,
                    validation_data=(X_valid, y_valid),
                    callbacks=[checkpoint_cb, tensorboard_cb])
Epoch 1/30 363/363 [==============================] - 1s 3ms/step - loss: 0.5530 - val_loss: 302.8501 Epoch 2/30 363/363 [==============================] - 1s 2ms/step - loss: 92.2124 - val_loss: 0.9107 Epoch 3/30 363/363 [==============================] - 1s 2ms/step - loss: 0.9291 - val_loss: 0.8864 Epoch 4/30 363/363 [==============================] - 1s 2ms/step - loss: 0.8754 - val_loss: 0.8181 Epoch 5/30 363/363 [==============================] - 1s 2ms/step - loss: 0.8495 - val_loss: 0.8400 Epoch 6/30 363/363 [==============================] - 1s 2ms/step - loss: 0.8087 - val_loss: 0.7190 Epoch 7/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7832 - val_loss: 0.7886 Epoch 8/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7452 - val_loss: 0.6571 Epoch 9/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7310 - val_loss: 0.6445 Epoch 10/30 363/363 [==============================] - 1s 2ms/step - loss: 0.9471 - val_loss: 0.9889 Epoch 11/30 363/363 [==============================] - 1s 2ms/step - loss: 0.9150 - val_loss: 0.8715 Epoch 12/30 363/363 [==============================] - 1s 2ms/step - loss: 0.8971 - val_loss: 0.8913 Epoch 13/30 363/363 [==============================] - 1s 2ms/step - loss: 7.9472 - val_loss: 0.9471 Epoch 14/30 363/363 [==============================] - 1s 2ms/step - loss: 0.9446 - val_loss: 0.8746 Epoch 15/30 363/363 [==============================] - 1s 2ms/step - loss: 0.8675 - val_loss: 0.8190 Epoch 16/30 363/363 [==============================] - 1s 2ms/step - loss: 0.8220 - val_loss: 0.7647 Epoch 17/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7626 - val_loss: 0.6938 Epoch 18/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7682 - val_loss: 0.7478 Epoch 19/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7817 - val_loss: 0.7442 Epoch 20/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7599 - 
val_loss: 0.7292 Epoch 21/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7385 - val_loss: 1.0907 Epoch 22/30 363/363 [==============================] - 1s 2ms/step - loss: 0.7197 - val_loss: 0.7698 Epoch 23/30 363/363 [==============================] - 1s 2ms/step - loss: 0.6648 - val_loss: 0.5595 Epoch 24/30 363/363 [==============================] - 1s 2ms/step - loss: 0.6681 - val_loss: 0.6636 Epoch 25/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5831 - val_loss: 0.5468 Epoch 26/30 363/363 [==============================] - 1s 2ms/step - loss: 0.6095 - val_loss: 0.4990 Epoch 27/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5505 - val_loss: 0.4485 Epoch 28/30 363/363 [==============================] - 1s 2ms/step - loss: 0.6104 - val_loss: 0.6129 Epoch 29/30 363/363 [==============================] - 1s 2ms/step - loss: 0.6054 - val_loss: 0.5005 Epoch 30/30 363/363 [==============================] - 1s 2ms/step - loss: 0.5755 - val_loss: 0.5436
텐서보드에 실행 결과가 2개 있습니다. 학습 곡선을 비교해 보세요.
사용할 수 있는 로깅 옵션을 확인해 보죠:
# Show the TensorBoard callback's constructor signature and its logging options.
help(keras.callbacks.TensorBoard.__init__)
Help on function __init__ in module keras.callbacks: __init__(self, log_dir='logs', histogram_freq=0, write_graph=True, write_images=False, write_steps_per_second=False, update_freq='epoch', profile_batch=2, embeddings_freq=0, embeddings_metadata=None, **kwargs) Initialize self. See help(type(self)) for accurate signature.
# Reset the Keras session and reseed both RNGs for reproducible results.
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)
def build_model(n_hidden=1, n_neurons=30, learning_rate=3e-3, input_shape=(8,)):
    """Build and compile an MLP regressor (for use in hyperparameter search).

    Args:
        n_hidden: number of hidden Dense layers.
        n_neurons: units per hidden layer (ReLU activation).
        learning_rate: SGD learning rate.
        input_shape: shape of one input sample. The original default was
            the mutable list ``[8]`` — a Python anti-pattern (mutable
            default argument); an equivalent tuple is used instead, which
            Keras accepts interchangeably.

    Returns:
        A compiled ``keras.models.Sequential`` model with MSE loss.
    """
    model = keras.models.Sequential()
    model.add(keras.layers.InputLayer(input_shape=input_shape))
    for _ in range(n_hidden):
        model.add(keras.layers.Dense(n_neurons, activation="relu"))
    model.add(keras.layers.Dense(1))
    model.compile(loss="mse",
                  optimizer=keras.optimizers.SGD(learning_rate=learning_rate))
    return model
# Wrap the build function so the model behaves like a scikit-learn
# regressor (enabling cross-validation and randomized search), then fit
# with early stopping on the validation loss.
keras_reg = keras.wrappers.scikit_learn.KerasRegressor(build_model)
keras_reg.fit(X_train, y_train, epochs=100,
              validation_data=(X_valid, y_valid),
              callbacks=[keras.callbacks.EarlyStopping(patience=10)])
Epoch 1/100
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
363/363 [==============================] - 1s 2ms/step - loss: 1.0896 - val_loss: 20.7721 Epoch 2/100 363/363 [==============================] - 1s 2ms/step - loss: 0.7606 - val_loss: 5.0266 Epoch 3/100 363/363 [==============================] - 1s 2ms/step - loss: 0.5456 - val_loss: 0.5490 Epoch 4/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4732 - val_loss: 0.4529 Epoch 5/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4503 - val_loss: 0.4188 Epoch 6/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4338 - val_loss: 0.4129 Epoch 7/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4241 - val_loss: 0.4004 Epoch 8/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4168 - val_loss: 0.3944 Epoch 9/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4108 - val_loss: 0.3961 Epoch 10/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4060 - val_loss: 0.4071 Epoch 11/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4021 - val_loss: 0.3855 Epoch 12/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3984 - val_loss: 0.4136 Epoch 13/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3951 - val_loss: 0.3997 Epoch 14/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3921 - val_loss: 0.3818 Epoch 15/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3894 - val_loss: 0.3829 Epoch 16/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3869 - val_loss: 0.3739 Epoch 17/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3848 - val_loss: 0.4022 Epoch 18/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3829 - val_loss: 0.3873 Epoch 19/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3807 - val_loss: 0.3768 Epoch 20/100 363/363 [==============================] - 1s 2ms/step - loss: 
0.3791 - val_loss: 0.4191 Epoch 21/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3774 - val_loss: 0.3927 Epoch 22/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3756 - val_loss: 0.4237 Epoch 23/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3742 - val_loss: 0.3523 Epoch 24/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3725 - val_loss: 0.3842 Epoch 25/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3710 - val_loss: 0.4162 Epoch 26/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3700 - val_loss: 0.3980 Epoch 27/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3691 - val_loss: 0.3474 Epoch 28/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3677 - val_loss: 0.3920 Epoch 29/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3670 - val_loss: 0.3566 Epoch 30/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3653 - val_loss: 0.4191 Epoch 31/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3647 - val_loss: 0.3721 Epoch 32/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3633 - val_loss: 0.3948 Epoch 33/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3632 - val_loss: 0.3423 Epoch 34/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3617 - val_loss: 0.3453 Epoch 35/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3610 - val_loss: 0.4068 Epoch 36/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3608 - val_loss: 0.3417 Epoch 37/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3596 - val_loss: 0.3787 Epoch 38/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3589 - val_loss: 0.3379 Epoch 39/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3582 - val_loss: 0.3419 Epoch 40/100 363/363 
[==============================] - 1s 2ms/step - loss: 0.3572 - val_loss: 0.3705 Epoch 41/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3570 - val_loss: 0.3660 Epoch 42/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3563 - val_loss: 0.3804 Epoch 43/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3551 - val_loss: 0.3765 Epoch 44/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3548 - val_loss: 0.3813 Epoch 45/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3543 - val_loss: 0.3326 Epoch 46/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3532 - val_loss: 0.3385 Epoch 47/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3527 - val_loss: 0.3656 Epoch 48/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3521 - val_loss: 0.3578 Epoch 49/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3525 - val_loss: 0.3359 Epoch 50/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3510 - val_loss: 0.3318 Epoch 51/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3504 - val_loss: 0.3562 Epoch 52/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3502 - val_loss: 0.3523 Epoch 53/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3496 - val_loss: 0.4585 Epoch 54/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3497 - val_loss: 0.3811 Epoch 55/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3490 - val_loss: 0.3539 Epoch 56/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3485 - val_loss: 0.3725 Epoch 57/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3479 - val_loss: 0.3337 Epoch 58/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3469 - val_loss: 0.4007 Epoch 59/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3475 
- val_loss: 0.3264 Epoch 60/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3465 - val_loss: 0.3271 Epoch 61/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3452 - val_loss: 0.3347 Epoch 62/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3453 - val_loss: 0.3493 Epoch 63/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3444 - val_loss: 0.3402 Epoch 64/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3450 - val_loss: 0.3274 Epoch 65/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3437 - val_loss: 0.3297 Epoch 66/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3431 - val_loss: 0.3307 Epoch 67/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3428 - val_loss: 0.3252 Epoch 68/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3423 - val_loss: 0.3242 Epoch 69/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3419 - val_loss: 0.3254 Epoch 70/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3413 - val_loss: 0.3657 Epoch 71/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3414 - val_loss: 0.3381 Epoch 72/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3405 - val_loss: 0.3273 Epoch 73/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3400 - val_loss: 0.3241 Epoch 74/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3402 - val_loss: 0.3656 Epoch 75/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3397 - val_loss: 0.3286 Epoch 76/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3395 - val_loss: 0.3241 Epoch 77/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3383 - val_loss: 0.3377 Epoch 78/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3384 - val_loss: 0.3363 Epoch 79/100 363/363 
[==============================] - 1s 2ms/step - loss: 0.3383 - val_loss: 0.3224 Epoch 80/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3376 - val_loss: 0.3603 Epoch 81/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3384 - val_loss: 0.3437 Epoch 82/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3371 - val_loss: 0.3223 Epoch 83/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3368 - val_loss: 0.3306 Epoch 84/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3362 - val_loss: 0.4086 Epoch 85/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3369 - val_loss: 0.3282 Epoch 86/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3359 - val_loss: 0.3399 Epoch 87/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3356 - val_loss: 0.3789 Epoch 88/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3358 - val_loss: 0.3224 Epoch 89/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3346 - val_loss: 0.4135 Epoch 90/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3354 - val_loss: 0.3395 Epoch 91/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3341 - val_loss: 0.4533 Epoch 92/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3355 - val_loss: 0.4068
<keras.callbacks.History at 0x7fe061c36910>
# Evaluate the fitted sklearn-wrapped Keras regressor on the held-out test set.
# NOTE(review): despite the variable name, an sklearn-style `score` conventionally
# returns "higher is better" (e.g. negative loss), not a raw MSE — confirm
# against the wrapper class defined earlier in the notebook.
mse_test = keras_reg.score(X_test, y_test)
162/162 [==============================] - 0s 1ms/step - loss: 0.3362
# Predict targets for the new samples (X_new) prepared earlier in the notebook.
y_pred = keras_reg.predict(X_new)
# Re-seed both NumPy's and TensorFlow's global RNGs so the upcoming
# randomized hyperparameter search is reproducible across notebook runs.
np.random.seed(42)
tf.random.set_seed(42)
경고: 다음 셀은 훈련이 끝날 때 에러가 납니다. 이는 최근 사이킷런의 변화 때문에 생긴 케라스 이슈 #13586 때문입니다. 이 이슈를 해결하기 위한 풀 리퀘스트 #13598이 있으므로 곧 해결될 것 같습니다. 그때까지는 `.tolist()`와 `.rvs(1000).tolist()`를 추가해 사용합니다.
from scipy.stats import reciprocal
from sklearn.model_selection import RandomizedSearchCV
# Hyperparameter search space for RandomizedSearchCV.
# Plain Python lists (hence the .tolist() calls) are used to work around
# Keras issue #13586 triggered by recent scikit-learn versions.
param_distribs = dict(
    n_hidden=[0, 1, 2, 3],                                    # number of hidden layers
    n_neurons=list(range(1, 100)),                            # neurons per hidden layer
    learning_rate=reciprocal(3e-4, 3e-2).rvs(1000).tolist(),  # log-uniform samples
)
# Randomized hyperparameter search: sample 10 candidates from param_distribs,
# scoring each with 3-fold cross-validation.
rnd_search_cv = RandomizedSearchCV(keras_reg, param_distribs,
                                   n_iter=10, cv=3, verbose=2)
# Stop each training run once the validation loss has not improved for 10 epochs.
early_stopping_cb = keras.callbacks.EarlyStopping(patience=10)
rnd_search_cv.fit(X_train, y_train, epochs=100,
                  validation_data=(X_valid, y_valid),
                  callbacks=[early_stopping_cb])
Fitting 3 folds for each of 10 candidates, totalling 30 fits [CV] n_neurons=4, n_hidden=1, learning_rate=0.022174573948353458 ..... Epoch 1/100
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers. /usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
242/242 [==============================] - 1s 3ms/step - loss: 0.8420 - val_loss: 0.4703 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4815 - val_loss: 0.4247 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4519 - val_loss: 0.4052 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4429 - val_loss: 0.3975 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4368 - val_loss: 0.3991 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4340 - val_loss: 0.4031 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4351 - val_loss: 0.4043 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4267 - val_loss: 0.3929 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4258 - val_loss: 0.4040 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4237 - val_loss: 0.3886 Epoch 11/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4209 - val_loss: 0.3999 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4170 - val_loss: 0.4085 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4139 - val_loss: 0.3922 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4132 - val_loss: 0.3918 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4106 - val_loss: 0.3886 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4092 - val_loss: 0.3933 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4092 - val_loss: 0.3907 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4087 - val_loss: 0.3955 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4058 - val_loss: 0.3935 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4053 
- val_loss: 0.3891 121/121 [==============================] - 0s 1ms/step - loss: 0.4251 [CV] n_neurons=4, n_hidden=1, learning_rate=0.022174573948353458, total= 20.9s [CV] n_neurons=4, n_hidden=1, learning_rate=0.022174573948353458 ..... Epoch 1/100
[Parallel(n_jobs=1)]: Done 1 out of 1 | elapsed: 20.9s remaining: 0.0s
242/242 [==============================] - 1s 2ms/step - loss: 0.7452 - val_loss: 0.4860 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4649 - val_loss: 0.4280 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4495 - val_loss: 0.5791 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4438 - val_loss: 0.4549 Epoch 5/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4414 - val_loss: 0.5250 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4409 - val_loss: 0.5486 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4388 - val_loss: 0.5871 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4381 - val_loss: 0.4759 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4371 - val_loss: 0.7523 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4369 - val_loss: 0.7478 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4344 - val_loss: 0.8981 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4347 - val_loss: 0.8543 121/121 [==============================] - 0s 1ms/step - loss: 0.4537 [CV] n_neurons=4, n_hidden=1, learning_rate=0.022174573948353458, total= 10.8s [CV] n_neurons=4, n_hidden=1, learning_rate=0.022174573948353458 ..... 
Epoch 1/100 242/242 [==============================] - 1s 2ms/step - loss: 10.8724 - val_loss: 4.2476 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 1.0257 - val_loss: 0.5794 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5263 - val_loss: 0.4357 Epoch 4/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4640 - val_loss: 0.4169 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4515 - val_loss: 0.4135 Epoch 6/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4486 - val_loss: 0.4206 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4567 - val_loss: 0.4100 Epoch 8/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4495 - val_loss: 0.4155 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4474 - val_loss: 0.4111 Epoch 10/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4620 - val_loss: 0.4076 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4470 - val_loss: 0.4062 Epoch 12/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4460 - val_loss: 0.4078 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4462 - val_loss: 0.4160 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4445 - val_loss: 0.4158 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4430 - val_loss: 0.4137 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4515 - val_loss: 0.4069 Epoch 17/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4422 - val_loss: 0.4119 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4408 - val_loss: 0.4149 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4429 - val_loss: 0.4081 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - 
loss: 0.4595 - val_loss: 0.4141 Epoch 21/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4431 - val_loss: 0.4100 121/121 [==============================] - 0s 1ms/step - loss: 0.4473 [CV] n_neurons=4, n_hidden=1, learning_rate=0.022174573948353458, total= 11.5s [CV] n_neurons=94, n_hidden=2, learning_rate=0.005432590230265343 .... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.1684 - val_loss: 6.2480 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6285 - val_loss: 5.2166 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5212 - val_loss: 0.4474 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4280 - val_loss: 0.3901 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4008 - val_loss: 0.3736 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3840 - val_loss: 0.3803 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3730 - val_loss: 0.3813 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3648 - val_loss: 0.3961 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3591 - val_loss: 0.3988 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3542 - val_loss: 0.3891 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3510 - val_loss: 0.3870 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3465 - val_loss: 0.3770 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3424 - val_loss: 0.3770 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3408 - val_loss: 0.3843 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3371 - val_loss: 0.3770 121/121 [==============================] - 0s 1ms/step - loss: 0.3561 [CV] n_neurons=94, n_hidden=2, 
learning_rate=0.005432590230265343, total= 10.7s [CV] n_neurons=94, n_hidden=2, learning_rate=0.005432590230265343 .... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 0.8828 - val_loss: 3.5738 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4887 - val_loss: 0.7767 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4267 - val_loss: 0.5515 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4011 - val_loss: 0.5335 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3852 - val_loss: 0.5336 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3746 - val_loss: 0.6750 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3678 - val_loss: 0.8462 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3610 - val_loss: 0.8724 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3554 - val_loss: 0.9645 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3525 - val_loss: 0.7225 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3475 - val_loss: 0.7257 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3442 - val_loss: 0.7217 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3422 - val_loss: 0.8443 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3407 - val_loss: 0.7065 121/121 [==============================] - 0s 1ms/step - loss: 0.3650 [CV] n_neurons=94, n_hidden=2, learning_rate=0.005432590230265343, total= 10.7s [CV] n_neurons=94, n_hidden=2, learning_rate=0.005432590230265343 .... 
Epoch 1/100 242/242 [==============================] - 1s 2ms/step - loss: 1.0015 - val_loss: 2.9433 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5546 - val_loss: 4.2557 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4854 - val_loss: 2.8526 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4594 - val_loss: 1.6798 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4136 - val_loss: 0.4322 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3937 - val_loss: 0.4172 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3829 - val_loss: 0.3769 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3753 - val_loss: 0.3688 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3692 - val_loss: 0.4032 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3655 - val_loss: 0.3418 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3610 - val_loss: 0.4452 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3575 - val_loss: 0.3453 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3526 - val_loss: 0.3395 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3503 - val_loss: 0.4355 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3489 - val_loss: 0.3388 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3455 - val_loss: 0.4036 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3434 - val_loss: 0.3302 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3406 - val_loss: 0.3581 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3386 - val_loss: 0.3548 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - 
loss: 0.3361 - val_loss: 0.3465 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3356 - val_loss: 0.3244 Epoch 22/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3339 - val_loss: 0.3256 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3316 - val_loss: 0.3429 Epoch 24/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3302 - val_loss: 0.3396 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3290 - val_loss: 0.3699 Epoch 26/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3269 - val_loss: 0.4030 Epoch 27/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3259 - val_loss: 0.3157 Epoch 28/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3236 - val_loss: 0.3172 Epoch 29/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3235 - val_loss: 0.4151 Epoch 30/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3232 - val_loss: 0.3183 Epoch 31/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3196 - val_loss: 0.3158 Epoch 32/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3177 - val_loss: 0.4224 Epoch 33/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3186 - val_loss: 0.3123 Epoch 34/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3151 - val_loss: 0.4123 Epoch 35/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3181 - val_loss: 0.4489 Epoch 36/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3142 - val_loss: 0.6385 Epoch 37/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3156 - val_loss: 0.5049 Epoch 38/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3191 - val_loss: 0.8745 Epoch 39/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3198 - val_loss: 0.6009 Epoch 40/100 242/242 
[==============================] - 1s 2ms/step - loss: 0.3161 - val_loss: 0.6455 Epoch 41/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3138 - val_loss: 0.3047 Epoch 42/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3089 - val_loss: 0.3888 Epoch 43/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3091 - val_loss: 0.3094 Epoch 44/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3065 - val_loss: 0.3637 Epoch 45/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3064 - val_loss: 0.3040 Epoch 46/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3040 - val_loss: 0.4073 Epoch 47/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3031 - val_loss: 0.3583 Epoch 48/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3032 - val_loss: 0.3281 Epoch 49/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3006 - val_loss: 0.3466 Epoch 50/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2998 - val_loss: 0.3248 Epoch 51/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2989 - val_loss: 0.3382 Epoch 52/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2983 - val_loss: 0.2992 Epoch 53/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2972 - val_loss: 0.3046 Epoch 54/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2963 - val_loss: 0.3056 Epoch 55/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2962 - val_loss: 0.3269 Epoch 56/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2949 - val_loss: 0.4620 Epoch 57/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2981 - val_loss: 0.2997 Epoch 58/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2947 - val_loss: 0.3866 Epoch 59/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2940 
- val_loss: 0.3332 Epoch 60/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2932 - val_loss: 0.6139 Epoch 61/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2940 - val_loss: 0.6724 Epoch 62/100 242/242 [==============================] - 1s 2ms/step - loss: 0.2957 - val_loss: 1.0578 121/121 [==============================] - 0s 1ms/step - loss: 0.3059 [CV] n_neurons=94, n_hidden=2, learning_rate=0.005432590230265343, total= 35.2s [CV] n_neurons=51, n_hidden=1, learning_rate=0.00037078874137762145 .. Epoch 1/100 242/242 [==============================] - 1s 2ms/step - loss: 4.3936 - val_loss: 13.3699 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 2.2098 - val_loss: 10.8972 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 1.4360 - val_loss: 7.7330 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 1.0926 - val_loss: 5.0744 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.9168 - val_loss: 3.2363 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.8186 - val_loss: 2.1597 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7619 - val_loss: 1.4840 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7266 - val_loss: 1.1083 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7031 - val_loss: 0.8942 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6858 - val_loss: 0.7687 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6720 - val_loss: 0.6947 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6602 - val_loss: 0.6524 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6498 - val_loss: 0.6234 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6401 - val_loss: 0.6061 Epoch 15/100 242/242 
[==============================] - 1s 2ms/step - loss: 0.6312 - val_loss: 0.5933 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6227 - val_loss: 0.5819 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6147 - val_loss: 0.5733 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6070 - val_loss: 0.5650 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5997 - val_loss: 0.5578 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5926 - val_loss: 0.5508 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5859 - val_loss: 0.5446 Epoch 22/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5794 - val_loss: 0.5384 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5732 - val_loss: 0.5326 Epoch 24/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5671 - val_loss: 0.5266 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5614 - val_loss: 0.5214 Epoch 26/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5558 - val_loss: 0.5166 Epoch 27/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5504 - val_loss: 0.5116 Epoch 28/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5453 - val_loss: 0.5076 Epoch 29/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5403 - val_loss: 0.5035 Epoch 30/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5356 - val_loss: 0.4989 Epoch 31/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5309 - val_loss: 0.4946 Epoch 32/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5265 - val_loss: 0.4915 Epoch 33/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5222 - val_loss: 0.4883 Epoch 34/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5181 
- val_loss: 0.4856 Epoch 35/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5141 - val_loss: 0.4828 Epoch 36/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5103 - val_loss: 0.4789 Epoch 37/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5066 - val_loss: 0.4780 Epoch 38/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5030 - val_loss: 0.4742 Epoch 39/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4995 - val_loss: 0.4729 Epoch 40/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4962 - val_loss: 0.4714 Epoch 41/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4929 - val_loss: 0.4686 Epoch 42/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4897 - val_loss: 0.4666 Epoch 43/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4867 - val_loss: 0.4646 Epoch 44/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4837 - val_loss: 0.4636 Epoch 45/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4809 - val_loss: 0.4616 Epoch 46/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4781 - val_loss: 0.4582 Epoch 47/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4755 - val_loss: 0.4581 Epoch 48/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4729 - val_loss: 0.4573 Epoch 49/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4704 - val_loss: 0.4560 Epoch 50/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4680 - val_loss: 0.4544 Epoch 51/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4656 - val_loss: 0.4525 Epoch 52/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4633 - val_loss: 0.4527 Epoch 53/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4611 - val_loss: 0.4522 Epoch 54/100 242/242 
[==============================] - 1s 2ms/step - loss: 0.4589 - val_loss: 0.4509 Epoch 55/100 242/242 [==============================] - 0s 2ms/step - loss: 0.4568 - val_loss: 0.4509 Epoch 56/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4548 - val_loss: 0.4513 Epoch 57/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4529 - val_loss: 0.4496 Epoch 58/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4510 - val_loss: 0.4510 Epoch 59/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4491 - val_loss: 0.4502 Epoch 60/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4473 - val_loss: 0.4478 Epoch 61/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4456 - val_loss: 0.4485 Epoch 62/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4440 - val_loss: 0.4488 Epoch 63/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4423 - val_loss: 0.4477 Epoch 64/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4408 - val_loss: 0.4497 Epoch 65/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4392 - val_loss: 0.4512 Epoch 66/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4378 - val_loss: 0.4484 Epoch 67/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4363 - val_loss: 0.4483 Epoch 68/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4349 - val_loss: 0.4494 Epoch 69/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4336 - val_loss: 0.4492 Epoch 70/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4322 - val_loss: 0.4476 Epoch 71/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4309 - val_loss: 0.4481 Epoch 72/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4296 - val_loss: 0.4503 Epoch 73/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4284 
- val_loss: 0.4486 Epoch 74/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4272 - val_loss: 0.4491 Epoch 75/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4261 - val_loss: 0.4496 Epoch 76/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4249 - val_loss: 0.4483 Epoch 77/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4238 - val_loss: 0.4474 Epoch 78/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4227 - val_loss: 0.4490 Epoch 79/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4217 - val_loss: 0.4495 Epoch 80/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4206 - val_loss: 0.4468 Epoch 81/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4196 - val_loss: 0.4492 Epoch 82/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4186 - val_loss: 0.4525 Epoch 83/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4177 - val_loss: 0.4504 Epoch 84/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4167 - val_loss: 0.4525 Epoch 85/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4158 - val_loss: 0.4495 Epoch 86/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4148 - val_loss: 0.4548 Epoch 87/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4140 - val_loss: 0.4512 Epoch 88/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4132 - val_loss: 0.4481 Epoch 89/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4123 - val_loss: 0.4472 Epoch 90/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4114 - val_loss: 0.4506 121/121 [==============================] - 0s 1ms/step - loss: 0.4209 [CV] n_neurons=51, n_hidden=1, learning_rate=0.00037078874137762145, total= 1.4min [CV] n_neurons=51, n_hidden=1, learning_rate=0.00037078874137762145 .. 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 3.4569 - val_loss: 7.5238 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 1.5656 - val_loss: 8.6120 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 1.0607 - val_loss: 8.4896 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.8953 - val_loss: 7.7423 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.8236 - val_loss: 6.8202 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7840 - val_loss: 5.9344 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7579 - val_loss: 5.1492 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7381 - val_loss: 4.4548 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7216 - val_loss: 3.9122 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7071 - val_loss: 3.4233 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6937 - val_loss: 2.9997 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6814 - val_loss: 2.6082 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6701 - val_loss: 2.2766 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6593 - val_loss: 1.9984 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6491 - val_loss: 1.7447 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6395 - val_loss: 1.5300 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6303 - val_loss: 1.3410 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6217 - val_loss: 1.1762 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6133 - val_loss: 1.0345 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - 
loss: 0.6055 - val_loss: 0.9174 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5980 - val_loss: 0.8153 Epoch 22/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5908 - val_loss: 0.7363 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5839 - val_loss: 0.6696 Epoch 24/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5774 - val_loss: 0.6187 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5711 - val_loss: 0.5778 Epoch 26/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5652 - val_loss: 0.5491 Epoch 27/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5594 - val_loss: 0.5299 Epoch 28/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5540 - val_loss: 0.5199 Epoch 29/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5486 - val_loss: 0.5172 Epoch 30/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5438 - val_loss: 0.5206 Epoch 31/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5389 - val_loss: 0.5312 Epoch 32/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5343 - val_loss: 0.5447 Epoch 33/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5298 - val_loss: 0.5639 Epoch 34/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5256 - val_loss: 0.5821 Epoch 35/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5216 - val_loss: 0.6039 Epoch 36/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5177 - val_loss: 0.6306 Epoch 37/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5140 - val_loss: 0.6564 Epoch 38/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5104 - val_loss: 0.6820 Epoch 39/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5069 - val_loss: 0.7087 121/121 
[==============================] - 0s 1ms/step - loss: 0.5160 [CV] n_neurons=51, n_hidden=1, learning_rate=0.00037078874137762145, total= 22.4s [CV] n_neurons=51, n_hidden=1, learning_rate=0.00037078874137762145 .. Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 4.0974 - val_loss: 7.4460 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 2.1844 - val_loss: 5.2071 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 1.4253 - val_loss: 2.9554 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 1.0762 - val_loss: 1.7752 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.9094 - val_loss: 1.1201 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.8243 - val_loss: 0.8519 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7768 - val_loss: 0.7512 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7473 - val_loss: 0.7064 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7264 - val_loss: 0.6896 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7098 - val_loss: 0.6760 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6955 - val_loss: 0.6687 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6830 - val_loss: 0.6577 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6713 - val_loss: 0.6454 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6604 - val_loss: 0.6355 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6503 - val_loss: 0.6256 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6407 - val_loss: 0.6213 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6317 - val_loss: 0.6120 Epoch 18/100 242/242 [==============================] - 1s 
2ms/step - loss: 0.6230 - val_loss: 0.6024 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6148 - val_loss: 0.5998 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6072 - val_loss: 0.5901 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5996 - val_loss: 0.5822 Epoch 22/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5925 - val_loss: 0.5763 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5857 - val_loss: 0.5664 Epoch 24/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5791 - val_loss: 0.5574 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5728 - val_loss: 0.5527 Epoch 26/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5668 - val_loss: 0.5452 Epoch 27/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5610 - val_loss: 0.5437 Epoch 28/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5555 - val_loss: 0.5366 Epoch 29/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5501 - val_loss: 0.5322 Epoch 30/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5450 - val_loss: 0.5264 Epoch 31/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5399 - val_loss: 0.5234 Epoch 32/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5352 - val_loss: 0.5175 Epoch 33/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5305 - val_loss: 0.5137 Epoch 34/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5262 - val_loss: 0.5078 Epoch 35/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5219 - val_loss: 0.5045 Epoch 36/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5178 - val_loss: 0.4970 Epoch 37/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5139 - val_loss: 0.4911 Epoch 38/100 
242/242 [==============================] - 1s 2ms/step - loss: 0.5101 - val_loss: 0.4887 Epoch 39/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5064 - val_loss: 0.4847 Epoch 40/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5029 - val_loss: 0.4815 Epoch 41/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4994 - val_loss: 0.4776 Epoch 42/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4962 - val_loss: 0.4736 Epoch 43/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4930 - val_loss: 0.4706 Epoch 44/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4901 - val_loss: 0.4673 Epoch 45/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4871 - val_loss: 0.4655 Epoch 46/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4843 - val_loss: 0.4625 Epoch 47/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4816 - val_loss: 0.4576 Epoch 48/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4789 - val_loss: 0.4554 Epoch 49/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4764 - val_loss: 0.4525 Epoch 50/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4740 - val_loss: 0.4495 Epoch 51/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4716 - val_loss: 0.4468 Epoch 52/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4693 - val_loss: 0.4446 Epoch 53/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4670 - val_loss: 0.4420 Epoch 54/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4649 - val_loss: 0.4394 Epoch 55/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4628 - val_loss: 0.4373 Epoch 56/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4607 - val_loss: 0.4349 Epoch 57/100 242/242 [==============================] - 1s 2ms/step - 
loss: 0.4588 - val_loss: 0.4330 Epoch 58/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4569 - val_loss: 0.4311 Epoch 59/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4550 - val_loss: 0.4291 Epoch 60/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4532 - val_loss: 0.4277 Epoch 61/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4515 - val_loss: 0.4257 Epoch 62/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4498 - val_loss: 0.4241 Epoch 63/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4482 - val_loss: 0.4224 Epoch 64/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4466 - val_loss: 0.4208 Epoch 65/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4451 - val_loss: 0.4193 Epoch 66/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4436 - val_loss: 0.4180 Epoch 67/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4422 - val_loss: 0.4164 Epoch 68/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4408 - val_loss: 0.4151 Epoch 69/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4395 - val_loss: 0.4141 Epoch 70/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4382 - val_loss: 0.4124 Epoch 71/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4369 - val_loss: 0.4112 Epoch 72/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4357 - val_loss: 0.4101 Epoch 73/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4345 - val_loss: 0.4088 Epoch 74/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4334 - val_loss: 0.4081 Epoch 75/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4323 - val_loss: 0.4073 Epoch 76/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4312 - val_loss: 0.4070 Epoch 77/100 242/242 
[==============================] - 1s 2ms/step - loss: 0.4301 - val_loss: 0.4056 Epoch 78/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4291 - val_loss: 0.4040 Epoch 79/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4281 - val_loss: 0.4034 Epoch 80/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4271 - val_loss: 0.4033 Epoch 81/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4262 - val_loss: 0.4019 Epoch 82/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4253 - val_loss: 0.4008 Epoch 83/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4244 - val_loss: 0.4002 Epoch 84/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4236 - val_loss: 0.3996 Epoch 85/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4227 - val_loss: 0.3983 Epoch 86/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4219 - val_loss: 0.3980 Epoch 87/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4211 - val_loss: 0.3981 Epoch 88/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4203 - val_loss: 0.3969 Epoch 89/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4195 - val_loss: 0.3978 Epoch 90/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4188 - val_loss: 0.3961 Epoch 91/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4180 - val_loss: 0.3951 Epoch 92/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4173 - val_loss: 0.3938 Epoch 93/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4166 - val_loss: 0.3938 Epoch 94/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4159 - val_loss: 0.3935 Epoch 95/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4152 - val_loss: 0.3934 Epoch 96/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4146 
- val_loss: 0.3932 Epoch 97/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4139 - val_loss: 0.3939 Epoch 98/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4132 - val_loss: 0.3913 Epoch 99/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4127 - val_loss: 0.3916 Epoch 100/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4121 - val_loss: 0.3918 121/121 [==============================] - 0s 1ms/step - loss: 0.4139 [CV] n_neurons=51, n_hidden=1, learning_rate=0.00037078874137762145, total= 1.4min [CV] n_neurons=70, n_hidden=2, learning_rate=0.0016535051383872363 ... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 2.0765 - val_loss: 1.3536 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7485 - val_loss: 0.7463 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6415 - val_loss: 0.5899 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5900 - val_loss: 0.5366 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5507 - val_loss: 0.5063 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5188 - val_loss: 0.4813 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4926 - val_loss: 0.4639 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4721 - val_loss: 0.4427 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4552 - val_loss: 0.4393 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4420 - val_loss: 0.4137 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4313 - val_loss: 0.4071 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4224 - val_loss: 0.3983 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4147 - val_loss: 0.3933 Epoch 14/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.4089 - val_loss: 0.3972 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4029 - val_loss: 0.3852 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3986 - val_loss: 0.3830 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3939 - val_loss: 0.3947 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3900 - val_loss: 0.3713 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3857 - val_loss: 0.3752 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3828 - val_loss: 0.3741 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3803 - val_loss: 0.3782 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3766 - val_loss: 0.3637 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3742 - val_loss: 0.3723 Epoch 24/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3721 - val_loss: 0.3707 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3691 - val_loss: 0.4047 Epoch 26/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3679 - val_loss: 0.3839 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3654 - val_loss: 0.4167 Epoch 28/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3637 - val_loss: 0.3500 Epoch 29/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3607 - val_loss: 0.3792 Epoch 30/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3602 - val_loss: 0.3636 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3576 - val_loss: 0.3476 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3566 - val_loss: 0.3566 Epoch 33/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3548 
- val_loss: 0.3611 Epoch 34/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3541 - val_loss: 0.3414 Epoch 35/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3527 - val_loss: 0.3474 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3508 - val_loss: 0.3944 Epoch 37/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3507 - val_loss: 0.4401 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3502 - val_loss: 0.4721 Epoch 39/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3495 - val_loss: 0.3722 Epoch 40/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3481 - val_loss: 0.4019 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3467 - val_loss: 0.3376 Epoch 42/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3449 - val_loss: 0.3377 Epoch 43/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3450 - val_loss: 0.3354 Epoch 44/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3433 - val_loss: 0.3737 Epoch 45/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3441 - val_loss: 0.3336 Epoch 46/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3421 - val_loss: 0.3563 Epoch 47/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3415 - val_loss: 0.3547 Epoch 48/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3410 - val_loss: 0.3399 Epoch 49/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3400 - val_loss: 0.3304 Epoch 50/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3395 - val_loss: 0.3850 Epoch 51/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3393 - val_loss: 0.3430 Epoch 52/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3382 - val_loss: 0.3363 Epoch 53/100 242/242 
[==============================] - 1s 2ms/step - loss: 0.3378 - val_loss: 0.3386 Epoch 54/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3373 - val_loss: 0.3294 Epoch 55/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3360 - val_loss: 0.3655 Epoch 56/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3369 - val_loss: 0.3310 Epoch 57/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3354 - val_loss: 0.3730 Epoch 58/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3347 - val_loss: 0.3375 Epoch 59/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3340 - val_loss: 0.3263 Epoch 60/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3331 - val_loss: 0.3403 Epoch 61/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3329 - val_loss: 0.3436 Epoch 62/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3321 - val_loss: 0.3583 Epoch 63/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3320 - val_loss: 0.3306 Epoch 64/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3310 - val_loss: 0.3679 Epoch 65/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3310 - val_loss: 0.3298 Epoch 66/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3310 - val_loss: 0.3272 Epoch 67/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3299 - val_loss: 0.3565 Epoch 68/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3294 - val_loss: 0.3295 Epoch 69/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3282 - val_loss: 0.3440 121/121 [==============================] - 0s 2ms/step - loss: 0.3550 [CV] n_neurons=70, n_hidden=2, learning_rate=0.0016535051383872363, total= 42.7s [CV] n_neurons=70, n_hidden=2, learning_rate=0.0016535051383872363 ... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.8880 - val_loss: 3.4090 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7244 - val_loss: 1.6754 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6372 - val_loss: 0.9319 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5910 - val_loss: 0.6042 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5529 - val_loss: 0.5061 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5225 - val_loss: 0.5058 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4977 - val_loss: 0.5272 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4770 - val_loss: 0.5600 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4600 - val_loss: 0.5367 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4469 - val_loss: 0.5221 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4356 - val_loss: 0.4878 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4264 - val_loss: 0.4531 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4187 - val_loss: 0.4182 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4124 - val_loss: 0.3877 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4064 - val_loss: 0.3818 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4014 - val_loss: 0.4022 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3962 - val_loss: 0.4348 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3926 - val_loss: 0.4935 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3878 - val_loss: 0.5340 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3847 - val_loss: 0.5982 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3819 - val_loss: 0.6541 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3786 - val_loss: 0.7245 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3758 - val_loss: 0.8045 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3735 - val_loss: 0.8587 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3711 - val_loss: 0.9089 121/121 [==============================] - 0s 2ms/step - loss: 0.3884 [CV] n_neurons=70, n_hidden=2, learning_rate=0.0016535051383872363, total= 21.0s [CV] n_neurons=70, n_hidden=2, learning_rate=0.0016535051383872363 ... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 2.1014 - val_loss: 2.1643 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.7146 - val_loss: 0.6141 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.6063 - val_loss: 0.5601 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5633 - val_loss: 0.5241 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5302 - val_loss: 0.5017 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5034 - val_loss: 0.4749 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4814 - val_loss: 0.4558 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4633 - val_loss: 0.4297 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4483 - val_loss: 0.4464 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4374 - val_loss: 0.4189 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4266 - val_loss: 0.4438 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4188 - val_loss: 0.4250 Epoch 13/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.4121 - val_loss: 0.4009 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4067 - val_loss: 0.4403 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4025 - val_loss: 0.4014 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3981 - val_loss: 0.4247 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3946 - val_loss: 0.3964 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3914 - val_loss: 0.3974 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3887 - val_loss: 0.4229 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3860 - val_loss: 0.4053 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3840 - val_loss: 0.3989 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3815 - val_loss: 0.3957 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3793 - val_loss: 0.3864 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3774 - val_loss: 0.4022 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3756 - val_loss: 0.3729 Epoch 26/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3733 - val_loss: 0.3645 Epoch 27/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3716 - val_loss: 0.4107 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3702 - val_loss: 0.3925 Epoch 29/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3688 - val_loss: 0.4265 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3677 - val_loss: 0.3879 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3659 - val_loss: 0.3789 Epoch 32/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3643 
- val_loss: 0.4080 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3631 - val_loss: 0.3873 Epoch 34/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3617 - val_loss: 0.4232 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3613 - val_loss: 0.3718 Epoch 36/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3593 - val_loss: 0.3663 121/121 [==============================] - 0s 2ms/step - loss: 0.3555 [CV] n_neurons=70, n_hidden=2, learning_rate=0.0016535051383872363, total= 41.6s [CV] n_neurons=40, n_hidden=0, learning_rate=0.01824796188192035 ..... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.2908 - val_loss: 297.3652 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 2.1716 - val_loss: 539.0366 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 6.2333 - val_loss: 3736.4507 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 11.9933 - val_loss: 12227.6982 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 54.7041 - val_loss: 61529.1016 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 2281.0823 - val_loss: 268363.5625 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 2760.9927 - val_loss: 1210517.0000 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 40359.3789 - val_loss: 5411004.0000 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 83691.9375 - val_loss: 24506690.0000 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 1055625.6250 - val_loss: 119813024.0000 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 1860447.1250 - val_loss: 529731008.0000 121/121 [==============================] - 0s 1ms/step - loss: 1402365.2500 [CV] n_neurons=40, n_hidden=0, learning_rate=0.01824796188192035, total= 6.8s 
[CV] n_neurons=40, n_hidden=0, learning_rate=0.01824796188192035 ..... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.0446 - val_loss: 15.8284 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5210 - val_loss: 22.4892 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5063 - val_loss: 24.7894 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5101 - val_loss: 22.4864 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5097 - val_loss: 21.9009 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5089 - val_loss: 21.2895 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5113 - val_loss: 19.9064 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5102 - val_loss: 22.5013 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5069 - val_loss: 20.0987 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5087 - val_loss: 10.7128 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5083 - val_loss: 19.7319 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5049 - val_loss: 24.3237 Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5077 - val_loss: 25.9485 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5199 - val_loss: 10.5277 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5078 - val_loss: 17.1916 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5064 - val_loss: 21.8347 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5058 - val_loss: 11.7743 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5101 - val_loss: 14.1555 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5071 
- val_loss: 20.9814 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5032 - val_loss: 12.3621 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5071 - val_loss: 25.9146 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5132 - val_loss: 16.0461 Epoch 23/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5071 - val_loss: 19.4877 Epoch 24/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5084 - val_loss: 12.1054 121/121 [==============================] - 0s 1ms/step - loss: 0.7813 [CV] n_neurons=40, n_hidden=0, learning_rate=0.01824796188192035, total= 21.0s [CV] n_neurons=40, n_hidden=0, learning_rate=0.01824796188192035 ..... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.2328 - val_loss: 307.7496 Epoch 2/100 242/242 [==============================] - 1s 2ms/step - loss: 0.9214 - val_loss: 76.3015 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 1.3774 - val_loss: 795.2292 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 34.9847 - val_loss: 704.0450 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 1.3027 - val_loss: 2668.0286 Epoch 6/100 242/242 [==============================] - 1s 2ms/step - loss: 9.2431 - val_loss: 1446.2605 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 4.8034 - val_loss: 1540.5377 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 41.9016 - val_loss: 1396.7115 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 10.9509 - val_loss: 1334.0847 Epoch 10/100 242/242 [==============================] - 1s 2ms/step - loss: 1.4803 - val_loss: 216.7268 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 13.8366 - val_loss: 125.2065 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6450 - val_loss: 2.2902 
Epoch 13/100 242/242 [==============================] - 1s 2ms/step - loss: 0.7449 - val_loss: 790.5424 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 9.2398 - val_loss: 468.7424 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 2.2300 - val_loss: 1073.9149 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 37.3800 - val_loss: 865.6385 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 8.9708 - val_loss: 1128.1501 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 3.8291 - val_loss: 499.5191 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 24.8681 - val_loss: 309.7941 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 3.6469 - val_loss: 354.6341 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 4.2841 - val_loss: 559.4488 Epoch 22/100 242/242 [==============================] - 1s 2ms/step - loss: 4.5495 - val_loss: 393.8696 121/121 [==============================] - 0s 1ms/step - loss: 0.6226 [CV] n_neurons=40, n_hidden=0, learning_rate=0.01824796188192035, total= 21.0s [CV] n_neurons=30, n_hidden=3, learning_rate=0.0045455096956331 ...... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.2632 - val_loss: 1.4543 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.6364 - val_loss: 0.9557 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5396 - val_loss: 0.4628 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4783 - val_loss: 0.4214 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4400 - val_loss: 0.3984 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4169 - val_loss: 0.4056 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4004 - val_loss: 0.3741 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3888 - val_loss: 0.3926 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3809 - val_loss: 0.3832 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3743 - val_loss: 0.3929 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3697 - val_loss: 0.3570 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3640 - val_loss: 0.3790 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3595 - val_loss: 0.3840 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3569 - val_loss: 0.3950 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3530 - val_loss: 0.3751 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3503 - val_loss: 0.3955 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3476 - val_loss: 0.3900 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3455 - val_loss: 0.3905 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3420 - val_loss: 0.3944 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3404 - val_loss: 0.3811 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3378 - val_loss: 0.3906 121/121 [==============================] - 0s 2ms/step - loss: 0.3624 [CV] n_neurons=30, n_hidden=3, learning_rate=0.0045455096956331, total= 14.4s [CV] n_neurons=30, n_hidden=3, learning_rate=0.0045455096956331 ...... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.0130 - val_loss: 0.5822 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5558 - val_loss: 0.4873 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4781 - val_loss: 0.4420 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4376 - val_loss: 0.4139 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4111 - val_loss: 0.4132 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3947 - val_loss: 0.4464 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3840 - val_loss: 0.4717 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3760 - val_loss: 0.5331 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3682 - val_loss: 0.6951 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3633 - val_loss: 0.6944 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3574 - val_loss: 0.8506 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3535 - val_loss: 0.7660 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3509 - val_loss: 0.8731 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3487 - val_loss: 0.9306 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3463 - val_loss: 0.9345 121/121 [==============================] - 0s 2ms/step - loss: 0.3685 [CV] n_neurons=30, n_hidden=3, 
learning_rate=0.0045455096956331, total= 10.9s [CV] n_neurons=30, n_hidden=3, learning_rate=0.0045455096956331 ...... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.1090 - val_loss: 0.6796 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5639 - val_loss: 0.4957 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4920 - val_loss: 0.4633 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4547 - val_loss: 0.4565 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4305 - val_loss: 0.4150 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4141 - val_loss: 0.4331 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4026 - val_loss: 0.3887 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3936 - val_loss: 0.3785 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3857 - val_loss: 0.4233 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3819 - val_loss: 0.3652 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3760 - val_loss: 0.4336 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3725 - val_loss: 0.3763 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3683 - val_loss: 0.3632 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3644 - val_loss: 0.4460 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3624 - val_loss: 0.3555 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3584 - val_loss: 0.3947 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3560 - val_loss: 0.3623 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3525 - val_loss: 0.3774 Epoch 19/100 242/242 [==============================] 
- 1s 3ms/step - loss: 0.3505 - val_loss: 0.3806 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3479 - val_loss: 0.3420 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3463 - val_loss: 0.3452 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3446 - val_loss: 0.3273 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3425 - val_loss: 0.3279 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3395 - val_loss: 0.4328 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3401 - val_loss: 0.3426 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3366 - val_loss: 0.3228 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3342 - val_loss: 0.4407 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3357 - val_loss: 0.3301 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3331 - val_loss: 0.4053 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3323 - val_loss: 0.3360 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3293 - val_loss: 0.3329 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3279 - val_loss: 0.3659 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3270 - val_loss: 0.3483 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3243 - val_loss: 0.3597 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3246 - val_loss: 0.3132 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3224 - val_loss: 0.3623 Epoch 37/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3221 - val_loss: 0.3365 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3211 - val_loss: 0.5344 Epoch 39/100 
242/242 [==============================] - 1s 3ms/step - loss: 0.3231 - val_loss: 0.3311 Epoch 40/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3195 - val_loss: 0.4003 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3186 - val_loss: 0.3126 Epoch 42/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3164 - val_loss: 0.3867 Epoch 43/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3161 - val_loss: 0.3084 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3144 - val_loss: 0.3481 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3151 - val_loss: 0.3085 Epoch 46/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3128 - val_loss: 0.3754 Epoch 47/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3119 - val_loss: 0.3117 Epoch 48/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3119 - val_loss: 0.3004 Epoch 49/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3095 - val_loss: 0.4113 Epoch 50/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3113 - val_loss: 0.2997 Epoch 51/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3085 - val_loss: 0.4087 Epoch 52/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3089 - val_loss: 0.3072 Epoch 53/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3077 - val_loss: 0.3030 Epoch 54/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3070 - val_loss: 0.2966 Epoch 55/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3059 - val_loss: 0.2950 Epoch 56/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3037 - val_loss: 0.3551 Epoch 57/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3058 - val_loss: 0.3012 Epoch 58/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3047 - val_loss: 0.2981 Epoch 59/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3033 - val_loss: 0.3262 Epoch 60/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3045 - val_loss: 0.3689 Epoch 61/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3033 - val_loss: 0.3175 Epoch 62/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3013 - val_loss: 0.2994 Epoch 63/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3009 - val_loss: 0.2967 Epoch 64/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3005 - val_loss: 0.2973 Epoch 65/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3008 - val_loss: 0.3042 121/121 [==============================] - 0s 2ms/step - loss: 0.3098 [CV] n_neurons=30, n_hidden=3, learning_rate=0.0045455096956331, total= 1.4min [CV] n_neurons=49, n_hidden=1, learning_rate=0.0020587676114196545 ... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 2.1150 - val_loss: 29.5063 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 1.0854 - val_loss: 33.7784 Epoch 3/100 242/242 [==============================] - 1s 2ms/step - loss: 0.9418 - val_loss: 4.0125 Epoch 4/100 242/242 [==============================] - 1s 2ms/step - loss: 0.6369 - val_loss: 0.5556 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5632 - val_loss: 0.5119 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5371 - val_loss: 0.4888 Epoch 7/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5149 - val_loss: 0.4729 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4964 - val_loss: 0.4559 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4803 - val_loss: 0.4601 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4670 - val_loss: 0.4303 Epoch 11/100 242/242 
[==============================] - 1s 2ms/step - loss: 0.4561 - val_loss: 0.4205 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4469 - val_loss: 0.4242 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4391 - val_loss: 0.4107 Epoch 14/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4325 - val_loss: 0.4231 Epoch 15/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4270 - val_loss: 0.4221 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4221 - val_loss: 0.4084 Epoch 17/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4179 - val_loss: 0.4209 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4143 - val_loss: 0.4017 Epoch 19/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4106 - val_loss: 0.4322 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4080 - val_loss: 0.4001 Epoch 21/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4050 - val_loss: 0.4263 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4027 - val_loss: 0.4032 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4002 - val_loss: 0.4039 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3981 - val_loss: 0.3764 Epoch 25/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3962 - val_loss: 0.4241 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3944 - val_loss: 0.3779 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3925 - val_loss: 0.4126 Epoch 28/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3910 - val_loss: 0.3967 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3889 - val_loss: 0.4045 Epoch 30/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3880 
- val_loss: 0.3748 Epoch 31/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3864 - val_loss: 0.3717 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3851 - val_loss: 0.3676 Epoch 33/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3837 - val_loss: 0.4054 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3825 - val_loss: 0.3924 Epoch 35/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3815 - val_loss: 0.3611 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3800 - val_loss: 0.4182 Epoch 37/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3792 - val_loss: 0.3539 Epoch 38/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3779 - val_loss: 0.4403 Epoch 39/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3769 - val_loss: 0.3551 Epoch 40/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3757 - val_loss: 0.4125 Epoch 41/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3748 - val_loss: 0.3665 Epoch 42/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3735 - val_loss: 0.3591 Epoch 43/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3730 - val_loss: 0.3570 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3717 - val_loss: 0.4125 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3714 - val_loss: 0.3547 Epoch 46/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3699 - val_loss: 0.3779 Epoch 47/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3692 - val_loss: 0.3886 121/121 [==============================] - 0s 2ms/step - loss: 0.3877 [CV] n_neurons=49, n_hidden=1, learning_rate=0.0020587676114196545, total= 29.5s [CV] n_neurons=49, n_hidden=1, learning_rate=0.0020587676114196545 ... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.8463 - val_loss: 0.7805 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.7088 - val_loss: 1.1550 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.6196 - val_loss: 1.8115 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5692 - val_loss: 2.6113 Epoch 5/100 242/242 [==============================] - 1s 2ms/step - loss: 0.5319 - val_loss: 3.2626 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5046 - val_loss: 3.5247 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4841 - val_loss: 3.5926 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4679 - val_loss: 3.5562 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4551 - val_loss: 2.9541 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4459 - val_loss: 2.5606 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4377 - val_loss: 2.1560 121/121 [==============================] - 0s 1ms/step - loss: 0.4866 [CV] n_neurons=49, n_hidden=1, learning_rate=0.0020587676114196545, total= 7.4s [CV] n_neurons=49, n_hidden=1, learning_rate=0.0020587676114196545 ... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.7445 - val_loss: 2.5834 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.7268 - val_loss: 3.5564 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.6419 - val_loss: 1.7895 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.6134 - val_loss: 1.7436 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5564 - val_loss: 0.6344 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5303 - val_loss: 0.8713 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5070 - val_loss: 0.5604 Epoch 8/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4919 - val_loss: 0.4695 Epoch 9/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4759 - val_loss: 0.4942 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4659 - val_loss: 0.4375 Epoch 11/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4561 - val_loss: 0.4536 Epoch 12/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4481 - val_loss: 0.4276 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4408 - val_loss: 0.4084 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4354 - val_loss: 0.4897 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4300 - val_loss: 0.4018 Epoch 16/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4246 - val_loss: 0.5505 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4211 - val_loss: 0.4602 Epoch 18/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4171 - val_loss: 0.4347 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4132 - val_loss: 0.3835 Epoch 20/100 242/242 [==============================] - 1s 2ms/step - 
loss: 0.4092 - val_loss: 0.4115 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4065 - val_loss: 0.3817 Epoch 22/100 242/242 [==============================] - 1s 2ms/step - loss: 0.4035 - val_loss: 0.3737 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4008 - val_loss: 0.3720 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3983 - val_loss: 0.4318 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3970 - val_loss: 0.4158 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3943 - val_loss: 0.3821 Epoch 27/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3922 - val_loss: 0.4069 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3909 - val_loss: 0.4024 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3887 - val_loss: 0.5904 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3898 - val_loss: 0.4027 Epoch 31/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3858 - val_loss: 0.4216 Epoch 32/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3847 - val_loss: 0.3603 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3825 - val_loss: 0.4134 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3819 - val_loss: 0.3633 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3805 - val_loss: 0.3542 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3786 - val_loss: 0.3568 Epoch 37/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3783 - val_loss: 0.4216 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3770 - val_loss: 0.5522 Epoch 39/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3792 - val_loss: 0.5648 Epoch 40/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.3763 - val_loss: 0.6416 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3785 - val_loss: 0.3847 Epoch 42/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3729 - val_loss: 0.5255 Epoch 43/100 242/242 [==============================] - 1s 2ms/step - loss: 0.3744 - val_loss: 0.7023 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3741 - val_loss: 0.7507 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3771 - val_loss: 0.5608 121/121 [==============================] - 0s 1ms/step - loss: 0.3745 [CV] n_neurons=49, n_hidden=1, learning_rate=0.0020587676114196545, total= 41.4s [CV] n_neurons=74, n_hidden=3, learning_rate=0.005803602934201024 .... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.0682 - val_loss: 6.4183 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.7154 - val_loss: 16.7917 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5830 - val_loss: 4.7823 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4475 - val_loss: 8.6076 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4779 - val_loss: 1.8025 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4007 - val_loss: 0.3655 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3682 - val_loss: 0.3786 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3591 - val_loss: 0.4054 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3533 - val_loss: 0.3910 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3488 - val_loss: 0.3912 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3445 - val_loss: 0.3550 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3394 - val_loss: 0.3612 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3351 - val_loss: 0.3650 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3334 - val_loss: 0.3625 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3292 - val_loss: 0.3565 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3272 - val_loss: 0.3558 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3239 - val_loss: 0.3555 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3223 - val_loss: 0.3500 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3185 - val_loss: 0.3504 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3162 - val_loss: 0.3392 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3145 - val_loss: 0.3365 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3120 - val_loss: 0.3693 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3099 - val_loss: 0.3195 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3090 - val_loss: 0.3087 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3046 - val_loss: 0.3589 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3035 - val_loss: 0.3122 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3030 - val_loss: 0.3275 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3019 - val_loss: 0.3536 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2986 - val_loss: 0.3315 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2963 - val_loss: 0.2960 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2953 - val_loss: 0.3122 Epoch 32/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.2935 - val_loss: 0.2887 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2911 - val_loss: 0.3220 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2890 - val_loss: 0.3172 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2880 - val_loss: 0.2943 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2869 - val_loss: 0.3723 Epoch 37/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2858 - val_loss: 0.3260 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2851 - val_loss: 0.3555 Epoch 39/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2838 - val_loss: 0.2923 Epoch 40/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2823 - val_loss: 0.3324 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2808 - val_loss: 0.2885 Epoch 42/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2800 - val_loss: 0.2909 Epoch 43/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2792 - val_loss: 0.2860 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2768 - val_loss: 0.3188 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2767 - val_loss: 0.3108 Epoch 46/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2756 - val_loss: 0.3249 Epoch 47/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2739 - val_loss: 0.2890 Epoch 48/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2737 - val_loss: 0.2840 Epoch 49/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2713 - val_loss: 0.2777 Epoch 50/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2714 - val_loss: 0.3328 Epoch 51/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2700 
- val_loss: 0.3179 Epoch 52/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2687 - val_loss: 0.3177 Epoch 53/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2684 - val_loss: 0.2842 Epoch 54/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2667 - val_loss: 0.2830 Epoch 55/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2653 - val_loss: 0.2988 Epoch 56/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2660 - val_loss: 0.2723 Epoch 57/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2639 - val_loss: 0.3335 Epoch 58/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2645 - val_loss: 0.2753 Epoch 59/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2633 - val_loss: 0.2905 Epoch 60/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2614 - val_loss: 0.2812 Epoch 61/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2627 - val_loss: 0.3755 Epoch 62/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2613 - val_loss: 0.2795 Epoch 63/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2600 - val_loss: 0.3285 Epoch 64/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2603 - val_loss: 0.2762 Epoch 65/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2574 - val_loss: 0.3070 Epoch 66/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2584 - val_loss: 0.3185 121/121 [==============================] - 0s 2ms/step - loss: 0.3105 [CV] n_neurons=74, n_hidden=3, learning_rate=0.005803602934201024, total= 44.9s [CV] n_neurons=74, n_hidden=3, learning_rate=0.005803602934201024 .... 
Epoch 1/100 242/242 [==============================] - 1s 4ms/step - loss: 0.8717 - val_loss: 0.7369 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5007 - val_loss: 0.4431 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4272 - val_loss: 0.3919 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3961 - val_loss: 0.3834 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3777 - val_loss: 0.3951 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3661 - val_loss: 0.4650 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3580 - val_loss: 0.6408 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3524 - val_loss: 0.7273 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3462 - val_loss: 0.9104 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3421 - val_loss: 0.6969 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3364 - val_loss: 0.6999 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3327 - val_loss: 0.7835 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3292 - val_loss: 0.8539 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3277 - val_loss: 0.8282 121/121 [==============================] - 0s 2ms/step - loss: 0.3525 [CV] n_neurons=74, n_hidden=3, learning_rate=0.005803602934201024, total= 10.5s [CV] n_neurons=74, n_hidden=3, learning_rate=0.005803602934201024 .... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 0.9177 - val_loss: 0.9196 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4767 - val_loss: 2.1030 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4329 - val_loss: 3.5546 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4408 - val_loss: 1.5870 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3906 - val_loss: 0.4229 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3713 - val_loss: 0.3736 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3634 - val_loss: 0.3347 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3569 - val_loss: 0.3389 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3518 - val_loss: 0.3714 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3515 - val_loss: 0.3271 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3456 - val_loss: 0.3873 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3418 - val_loss: 0.3337 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3372 - val_loss: 0.3220 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3345 - val_loss: 0.3691 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3321 - val_loss: 0.3202 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3290 - val_loss: 0.3598 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3268 - val_loss: 0.3241 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3238 - val_loss: 0.3532 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3222 - val_loss: 0.3357 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3194 - val_loss: 0.3616 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3176 - val_loss: 0.3152 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3152 - val_loss: 0.3175 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3129 - val_loss: 0.3580 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3118 - val_loss: 0.3041 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3096 - val_loss: 0.3216 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3067 - val_loss: 0.3245 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3053 - val_loss: 0.3125 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3044 - val_loss: 0.3490 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3051 - val_loss: 0.3862 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3034 - val_loss: 0.3183 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2996 - val_loss: 0.3122 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2974 - val_loss: 0.3014 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2976 - val_loss: 0.3222 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2959 - val_loss: 0.3020 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2932 - val_loss: 0.2965 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2908 - val_loss: 0.4448 Epoch 37/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2910 - val_loss: 0.3683 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2941 - val_loss: 0.4166 Epoch 39/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2913 - val_loss: 0.2964 Epoch 40/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.2919 - val_loss: 0.3617 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2875 - val_loss: 0.3154 Epoch 42/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2859 - val_loss: 0.3138 Epoch 43/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2860 - val_loss: 0.2842 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2830 - val_loss: 0.3241 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2837 - val_loss: 0.2951 Epoch 46/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2811 - val_loss: 0.3328 Epoch 47/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2803 - val_loss: 0.2820 Epoch 48/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2800 - val_loss: 0.2838 Epoch 49/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2797 - val_loss: 0.3641 Epoch 50/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2781 - val_loss: 0.2970 Epoch 51/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2772 - val_loss: 0.3488 Epoch 52/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2765 - val_loss: 0.2949 Epoch 53/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2758 - val_loss: 0.2940 Epoch 54/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2754 - val_loss: 0.2973 Epoch 55/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2739 - val_loss: 0.2806 Epoch 56/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2746 - val_loss: 0.3015 Epoch 57/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2733 - val_loss: 0.3550 Epoch 58/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2752 - val_loss: 0.2928 Epoch 59/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2720 
- val_loss: 0.3158 Epoch 60/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2709 - val_loss: 0.2777 Epoch 61/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2694 - val_loss: 0.3425 Epoch 62/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2696 - val_loss: 0.2753 Epoch 63/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2688 - val_loss: 0.3171 Epoch 64/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2686 - val_loss: 0.2864 Epoch 65/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2689 - val_loss: 0.2885 Epoch 66/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2660 - val_loss: 0.2939 Epoch 67/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2654 - val_loss: 0.2901 Epoch 68/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2720 - val_loss: 0.2933 Epoch 69/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2652 - val_loss: 0.2779 Epoch 70/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2630 - val_loss: 0.2849 Epoch 71/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2643 - val_loss: 0.2742 Epoch 72/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2641 - val_loss: 0.2750 Epoch 73/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2637 - val_loss: 0.2925 Epoch 74/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2631 - val_loss: 0.2811 Epoch 75/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2625 - val_loss: 0.2931 Epoch 76/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2590 - val_loss: 0.3316 Epoch 77/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2619 - val_loss: 0.2795 Epoch 78/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2611 - val_loss: 0.4124 Epoch 79/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.2609 - val_loss: 0.2784 Epoch 80/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2583 - val_loss: 0.2716 Epoch 81/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2587 - val_loss: 0.2975 Epoch 82/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2570 - val_loss: 0.2786 Epoch 83/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2579 - val_loss: 0.3338 Epoch 84/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2599 - val_loss: 0.2742 Epoch 85/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2568 - val_loss: 0.2930 Epoch 86/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2561 - val_loss: 0.2810 Epoch 87/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2567 - val_loss: 0.3121 Epoch 88/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2540 - val_loss: 0.2711 Epoch 89/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2541 - val_loss: 0.2792 Epoch 90/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2549 - val_loss: 0.3115 Epoch 91/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2564 - val_loss: 0.2751 Epoch 92/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2544 - val_loss: 0.2695 Epoch 93/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2531 - val_loss: 0.2903 Epoch 94/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2523 - val_loss: 0.2842 Epoch 95/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2519 - val_loss: 0.3035 Epoch 96/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2502 - val_loss: 0.3096 Epoch 97/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2538 - val_loss: 0.2928 Epoch 98/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2530 
- val_loss: 0.3358 Epoch 99/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2514 - val_loss: 0.2702 Epoch 100/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2515 - val_loss: 0.2945 121/121 [==============================] - 0s 2ms/step - loss: 0.2939 [CV] n_neurons=74, n_hidden=3, learning_rate=0.005803602934201024, total= 1.4min [CV] n_neurons=80, n_hidden=3, learning_rate=0.0059640580092043885 ... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 0.9615 - val_loss: 10.9250 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5921 - val_loss: 3.3912 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4539 - val_loss: 0.4039 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3914 - val_loss: 0.3692 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3713 - val_loss: 0.3555 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3601 - val_loss: 0.3875 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3523 - val_loss: 0.3633 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3454 - val_loss: 0.3991 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3411 - val_loss: 0.3797 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3382 - val_loss: 0.3705 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3351 - val_loss: 0.3311 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3304 - val_loss: 0.3510 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3269 - val_loss: 0.3788 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3261 - val_loss: 0.3316 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3217 - val_loss: 0.3492 Epoch 16/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.3194 - val_loss: 0.3485 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3172 - val_loss: 0.3253 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3157 - val_loss: 0.3438 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3124 - val_loss: 0.3338 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3108 - val_loss: 0.3216 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3086 - val_loss: 0.3926 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3087 - val_loss: 0.3103 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3050 - val_loss: 0.3752 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3068 - val_loss: 0.3714 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3032 - val_loss: 0.3220 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2987 - val_loss: 0.3083 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2978 - val_loss: 0.3303 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2971 - val_loss: 0.3395 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2942 - val_loss: 0.3366 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2922 - val_loss: 0.3121 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2922 - val_loss: 0.3181 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2898 - val_loss: 0.2886 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2873 - val_loss: 0.3384 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2856 - val_loss: 0.2906 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2839 
- val_loss: 0.2999 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2826 - val_loss: 0.4515 Epoch 37/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2842 - val_loss: 0.6267 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2918 - val_loss: 0.4893 Epoch 39/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2845 - val_loss: 0.2884 Epoch 40/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2818 - val_loss: 0.3473 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2799 - val_loss: 0.2923 Epoch 42/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2778 - val_loss: 0.3041 Epoch 43/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2776 - val_loss: 0.2873 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2756 - val_loss: 0.3304 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2758 - val_loss: 0.2796 Epoch 46/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2742 - val_loss: 0.3144 Epoch 47/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2726 - val_loss: 0.3185 Epoch 48/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2722 - val_loss: 0.3024 Epoch 49/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2704 - val_loss: 0.2762 Epoch 50/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2703 - val_loss: 0.3676 Epoch 51/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2691 - val_loss: 0.3115 Epoch 52/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2665 - val_loss: 0.3507 Epoch 53/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2681 - val_loss: 0.2735 Epoch 54/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2651 - val_loss: 0.3027 Epoch 55/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.2642 - val_loss: 0.2830 Epoch 56/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2644 - val_loss: 0.2804 Epoch 57/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2630 - val_loss: 0.3494 Epoch 58/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2634 - val_loss: 0.2916 Epoch 59/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2628 - val_loss: 0.2750 Epoch 60/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2616 - val_loss: 0.3144 Epoch 61/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2613 - val_loss: 0.3114 Epoch 62/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2597 - val_loss: 0.3386 Epoch 63/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2604 - val_loss: 0.2923 121/121 [==============================] - 0s 2ms/step - loss: 0.3123 [CV] n_neurons=80, n_hidden=3, learning_rate=0.0059640580092043885, total= 1.4min [CV] n_neurons=80, n_hidden=3, learning_rate=0.0059640580092043885 ... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 0.8381 - val_loss: 0.6551 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4570 - val_loss: 0.4129 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4107 - val_loss: 0.6097 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3910 - val_loss: 0.6571 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3755 - val_loss: 0.6378 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3647 - val_loss: 0.8581 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3602 - val_loss: 1.0634 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3515 - val_loss: 1.1230 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3456 - val_loss: 1.2174 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3424 - val_loss: 0.7974 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3374 - val_loss: 0.8339 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3337 - val_loss: 0.7604 121/121 [==============================] - 0s 2ms/step - loss: 0.3594 [CV] n_neurons=80, n_hidden=3, learning_rate=0.0059640580092043885, total= 8.9s [CV] n_neurons=80, n_hidden=3, learning_rate=0.0059640580092043885 ... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 0.8683 - val_loss: 2.2007 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5072 - val_loss: 3.3028 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4486 - val_loss: 0.9130 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4071 - val_loss: 0.5328 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3842 - val_loss: 0.3609 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3729 - val_loss: 0.4151 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3659 - val_loss: 0.3580 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3596 - val_loss: 0.3516 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3528 - val_loss: 0.3983 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3511 - val_loss: 0.3323 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3445 - val_loss: 0.4228 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3420 - val_loss: 0.3284 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3374 - val_loss: 0.3469 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3337 - val_loss: 0.4038 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3326 - val_loss: 0.3274 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3291 - val_loss: 0.3821 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3262 - val_loss: 0.3207 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3229 - val_loss: 0.3242 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3216 - val_loss: 0.3804 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3191 - val_loss: 0.3388 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3181 - val_loss: 0.3510 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3160 - val_loss: 0.3075 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3137 - val_loss: 0.3380 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3121 - val_loss: 0.3562 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3102 - val_loss: 0.3158 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3076 - val_loss: 0.3245 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3047 - val_loss: 0.4195 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3096 - val_loss: 0.3261 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3068 - val_loss: 0.4709 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3060 - val_loss: 0.3084 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3013 - val_loss: 0.3303 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2990 - val_loss: 0.3561 121/121 [==============================] - 0s 2ms/step - loss: 0.3031 [CV] n_neurons=80, n_hidden=3, learning_rate=0.0059640580092043885, total= 23.0s [CV] n_neurons=59, n_hidden=2, learning_rate=0.004591455636549438 .... 
Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.2259 - val_loss: 0.5753 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5658 - val_loss: 8.9879 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5484 - val_loss: 11.0986 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5602 - val_loss: 1.1306 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4336 - val_loss: 0.5258 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4072 - val_loss: 0.4499 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3939 - val_loss: 0.4056 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3835 - val_loss: 0.3998 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3766 - val_loss: 0.3957 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3712 - val_loss: 0.3903 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3672 - val_loss: 0.3688 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3630 - val_loss: 0.3651 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3587 - val_loss: 0.3709 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3579 - val_loss: 0.3817 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3535 - val_loss: 0.3623 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3528 - val_loss: 0.3671 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3490 - val_loss: 0.3672 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3475 - val_loss: 0.3606 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3445 - val_loss: 0.3552 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3421 - val_loss: 0.3536 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3416 - val_loss: 0.3519 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3388 - val_loss: 0.3474 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3372 - val_loss: 0.3510 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3374 - val_loss: 0.3304 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3333 - val_loss: 0.3686 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3334 - val_loss: 0.3246 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3336 - val_loss: 0.3387 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3322 - val_loss: 0.3367 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3279 - val_loss: 0.3389 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3276 - val_loss: 0.3209 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3255 - val_loss: 0.3227 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3256 - val_loss: 0.3150 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3238 - val_loss: 0.3511 Epoch 34/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3222 - val_loss: 0.3161 Epoch 35/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3209 - val_loss: 0.3141 Epoch 36/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3191 - val_loss: 0.3843 Epoch 37/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3189 - val_loss: 0.3600 Epoch 38/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3190 - val_loss: 0.3544 Epoch 39/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3159 - val_loss: 0.3168 Epoch 40/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.3154 - val_loss: 0.3405 Epoch 41/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3190 - val_loss: 0.3163 Epoch 42/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3150 - val_loss: 0.3164 Epoch 43/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3148 - val_loss: 0.3143 Epoch 44/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3112 - val_loss: 0.3418 Epoch 45/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3144 - val_loss: 0.3057 Epoch 46/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3102 - val_loss: 0.3302 Epoch 47/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3089 - val_loss: 0.3330 Epoch 48/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3118 - val_loss: 0.3175 Epoch 49/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3073 - val_loss: 0.3019 Epoch 50/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3068 - val_loss: 0.3649 Epoch 51/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3061 - val_loss: 0.3269 Epoch 52/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3047 - val_loss: 0.3405 Epoch 53/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3051 - val_loss: 0.3068 Epoch 54/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3034 - val_loss: 0.3089 Epoch 55/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3030 - val_loss: 0.3346 Epoch 56/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3041 - val_loss: 0.3000 Epoch 57/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3014 - val_loss: 0.3707 Epoch 58/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3036 - val_loss: 0.3165 Epoch 59/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3001 
- val_loss: 0.2981 Epoch 60/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2991 - val_loss: 0.3177 Epoch 61/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2997 - val_loss: 0.3231 Epoch 62/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2982 - val_loss: 0.3422 Epoch 63/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2984 - val_loss: 0.3082 Epoch 64/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2962 - val_loss: 0.4176 Epoch 65/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2971 - val_loss: 0.3030 Epoch 66/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3034 - val_loss: 0.3343 Epoch 67/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2953 - val_loss: 0.2985 Epoch 68/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2943 - val_loss: 0.3713 Epoch 69/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2958 - val_loss: 0.2969 Epoch 70/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2947 - val_loss: 0.3563 Epoch 71/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2926 - val_loss: 0.2941 Epoch 72/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2912 - val_loss: 0.3500 Epoch 73/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2915 - val_loss: 0.2962 Epoch 74/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2903 - val_loss: 0.4277 Epoch 75/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2917 - val_loss: 0.2926 Epoch 76/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2892 - val_loss: 0.3673 Epoch 77/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2926 - val_loss: 0.3217 Epoch 78/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2894 - val_loss: 0.5634 Epoch 79/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.2941 - val_loss: 0.3334 Epoch 80/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2879 - val_loss: 0.5032 Epoch 81/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2882 - val_loss: 0.3119 Epoch 82/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2863 - val_loss: 0.6628 Epoch 83/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2921 - val_loss: 0.6841 Epoch 84/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2900 - val_loss: 0.8619 Epoch 85/100 242/242 [==============================] - 1s 3ms/step - loss: 0.2912 - val_loss: 0.3784 121/121 [==============================] - 0s 2ms/step - loss: 0.3198 [CV] n_neurons=59, n_hidden=2, learning_rate=0.004591455636549438, total= 1.4min [CV] n_neurons=59, n_hidden=2, learning_rate=0.004591455636549438 .... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.1975 - val_loss: 0.8898 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5319 - val_loss: 0.5270 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4524 - val_loss: 0.4844 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4199 - val_loss: 0.4250 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4023 - val_loss: 0.3735 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3907 - val_loss: 0.3859 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3833 - val_loss: 0.4576 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3754 - val_loss: 0.4928 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3691 - val_loss: 0.6246 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3660 - val_loss: 0.5255 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - 
loss: 0.3611 - val_loss: 0.5956 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3570 - val_loss: 0.6364 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3550 - val_loss: 0.7456 Epoch 14/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3529 - val_loss: 0.7136 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3503 - val_loss: 0.6905 121/121 [==============================] - 0s 2ms/step - loss: 0.3615 [CV] n_neurons=59, n_hidden=2, learning_rate=0.004591455636549438, total= 11.0s [CV] n_neurons=59, n_hidden=2, learning_rate=0.004591455636549438 .... Epoch 1/100 242/242 [==============================] - 1s 3ms/step - loss: 1.1315 - val_loss: 2.8528 Epoch 2/100 242/242 [==============================] - 1s 3ms/step - loss: 0.6016 - val_loss: 2.3412 Epoch 3/100 242/242 [==============================] - 1s 3ms/step - loss: 0.5108 - val_loss: 0.9015 Epoch 4/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4594 - val_loss: 0.8313 Epoch 5/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4291 - val_loss: 0.5217 Epoch 6/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4115 - val_loss: 0.4956 Epoch 7/100 242/242 [==============================] - 1s 3ms/step - loss: 0.4016 - val_loss: 0.3745 Epoch 8/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3917 - val_loss: 0.4012 Epoch 9/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3848 - val_loss: 0.4169 Epoch 10/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3815 - val_loss: 0.3843 Epoch 11/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3756 - val_loss: 0.6122 Epoch 12/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3743 - val_loss: 0.3579 Epoch 13/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3671 - val_loss: 0.3497 Epoch 14/100 242/242 
[==============================] - 1s 3ms/step - loss: 0.3632 - val_loss: 0.5161 Epoch 15/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3635 - val_loss: 0.4273 Epoch 16/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3592 - val_loss: 0.5739 Epoch 17/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3601 - val_loss: 0.4975 Epoch 18/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3547 - val_loss: 0.4886 Epoch 19/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3535 - val_loss: 0.3371 Epoch 20/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3485 - val_loss: 0.4118 Epoch 21/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3485 - val_loss: 0.3310 Epoch 22/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3460 - val_loss: 0.3289 Epoch 23/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3435 - val_loss: 0.3287 Epoch 24/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3410 - val_loss: 0.5224 Epoch 25/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3438 - val_loss: 0.7689 Epoch 26/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3423 - val_loss: 0.8909 Epoch 27/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3448 - val_loss: 0.4864 Epoch 28/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3369 - val_loss: 0.6169 Epoch 29/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3373 - val_loss: 0.3470 Epoch 30/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3327 - val_loss: 0.5750 Epoch 31/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3332 - val_loss: 0.3685 Epoch 32/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3308 - val_loss: 0.7292 Epoch 33/100 242/242 [==============================] - 1s 3ms/step - loss: 0.3344 
- val_loss: 0.3932 121/121 [==============================] - 0s 2ms/step - loss: 0.3362 [CV] n_neurons=59, n_hidden=2, learning_rate=0.004591455636549438, total= 41.5s Epoch 1/100
[Parallel(n_jobs=1)]: Done 30 out of 30 | elapsed: 16.9min finished
363/363 [==============================] - 1s 2ms/step - loss: 0.8194 - val_loss: 1.8036 Epoch 2/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4857 - val_loss: 2.0827 Epoch 3/100 363/363 [==============================] - 1s 2ms/step - loss: 0.4266 - val_loss: 0.3796 Epoch 4/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3857 - val_loss: 0.4283 Epoch 5/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3740 - val_loss: 0.3617 Epoch 6/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3620 - val_loss: 0.4566 Epoch 7/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3566 - val_loss: 0.3573 Epoch 8/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3492 - val_loss: 0.3380 Epoch 9/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3432 - val_loss: 0.3757 Epoch 10/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3389 - val_loss: 0.4069 Epoch 11/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3380 - val_loss: 0.5469 Epoch 12/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3368 - val_loss: 0.6487 Epoch 13/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3318 - val_loss: 0.3108 Epoch 14/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3257 - val_loss: 0.3198 Epoch 15/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3240 - val_loss: 0.3064 Epoch 16/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3203 - val_loss: 0.3235 Epoch 17/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3174 - val_loss: 0.4025 Epoch 18/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3154 - val_loss: 0.2985 Epoch 19/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3139 - val_loss: 0.3128 Epoch 20/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3112 
- val_loss: 0.4343 Epoch 21/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3109 - val_loss: 0.3255 Epoch 22/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3075 - val_loss: 0.5121 Epoch 23/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3065 - val_loss: 0.5495 Epoch 24/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3046 - val_loss: 0.5264 Epoch 25/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3027 - val_loss: 0.3360 Epoch 26/100 363/363 [==============================] - 1s 2ms/step - loss: 0.2997 - val_loss: 0.4583 Epoch 27/100 363/363 [==============================] - 1s 2ms/step - loss: 0.2993 - val_loss: 0.5754 Epoch 28/100 363/363 [==============================] - 1s 2ms/step - loss: 0.3002 - val_loss: 0.4624
RandomizedSearchCV(cv=3, error_score=nan, estimator=<keras.wrappers.scikit_learn.KerasRegressor object at 0x7fe06218a350>, iid='deprecated', n_iter=10, n_jobs=None, param_distributions={'learning_rate': [0.001683454924600351, 0.02390836445593178, 0.008731907739399206, 0.004725396149933917, 0.0006154014789262348, 0.0006153331256530192, 0.0003920021771415983, 0.01619... 0.0024505367684280487, 0.011155092541719619, 0.0007524347058135697, 0.0032032448128444043, 0.004591455636549438, 0.0003715541189658278, ...], 'n_hidden': [0, 1, 2, 3], 'n_neurons': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, ...]}, pre_dispatch='2*n_jobs', random_state=None, refit=True, return_train_score=False, scoring=None, verbose=2)
rnd_search_cv.best_params_
{'learning_rate': 0.005803602934201024, 'n_hidden': 3, 'n_neurons': 74}
rnd_search_cv.best_score_
-0.3189570407072703
rnd_search_cv.best_estimator_
<keras.wrappers.scikit_learn.KerasRegressor at 0x7fe0620abd90>
rnd_search_cv.score(X_test, y_test)
162/162 [==============================] - 0s 1ms/step - loss: 0.3022
-0.30217668414115906
model = rnd_search_cv.best_estimator_.model
model
<keras.engine.sequential.Sequential at 0x7fe0c08fc290>
model.evaluate(X_test, y_test)
162/162 [==============================] - 0s 1ms/step - loss: 0.3022
0.30217668414115906
부록 A 참조.
문제: 심층 MLP를 MNIST 데이터셋에 훈련해보세요(keras.datasets.mnist.load_data()
함수를 사용해 데이터를 적재할 수 있습니다). 98% 이상의 정확도를 얻을 수 있는지 확인해보세요. 이 장에서 소개한 방법을 사용해 최적의 학습률을 찾아보세요(즉 학습률을 지수적으로 증가시키면서 손실을 그래프로 그립니다. 그다음 손실이 다시 증가하는 지점을 찾습니다). 모든 부가 기능을 추가해보세요. 즉, 체크포인트를 저장하고, 조기 종료를 사용하고, 텐서보드를 사용해 학습 곡선을 그려보세요.
데이터셋을 적재해보죠:
# Load MNIST: 60,000 training and 10,000 test images of 28x28 uint8 pixels.
(X_train_full, y_train_full), (X_test, y_test) = keras.datasets.mnist.load_data()
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz 11493376/11490434 [==============================] - 0s 0us/step 11501568/11490434 [==============================] - 0s 0us/step
패션 MNIST 데이터셋처럼 MNIST 훈련 세트는 28x28 픽셀의 흑백 이미지 60,000개로 이루어져 있습니다:
X_train_full.shape
(60000, 28, 28)
각 픽셀 강도는 바이트(0~255)로 표현됩니다:
X_train_full.dtype
dtype('uint8')
전체 훈련 세트를 검증 세트와 (더 작은) 훈련 세트로 나누어 보겠습니다. 패션 MNIST처럼 픽셀 강도를 255로 나누어 0-1 범위의 실수로 변환합니다:
# Hold out the first 5,000 images for validation and scale pixel
# intensities from the 0-255 byte range down to 0-1 floats.
X_valid = X_train_full[:5000] / 255.
X_train = X_train_full[5000:] / 255.
y_valid = y_train_full[:5000]
y_train = y_train_full[5000:]
X_test = X_test / 255.
맷플롯립의 imshow()
함수와 'binary'
컬러 맵으로 이미지를 출력해 보죠:
plt.imshow(X_train[0], cmap="binary")
plt.axis('off')
plt.show()
레이블은 (uint8로 표현된) 0에서 9까지 클래스 아이디입니다. 편리하게도 클래스 아이디는 이미지가 나타내는 숫자와 같습니다. 따라서 class_names
배열을 만들 필요가 없습니다:
y_train
array([7, 3, 4, ..., 5, 6, 8], dtype=uint8)
검증 세트는 5,000개의 이미지를 담고 있고 테스트 세트는 10,000개의 이미지를 담고 있습니다:
X_valid.shape
(5000, 28, 28)
X_test.shape
(10000, 28, 28)
이 데이터셋에 있는 이미지 샘플 몇 개를 출력해 보죠:
n_rows = 4
n_cols = 10
plt.figure(figsize=(n_cols * 1.2, n_rows * 1.2))
# Show the first n_rows * n_cols training digits in a grid,
# each titled with its class label (which equals the digit itself).
for index in range(n_rows * n_cols):
    plt.subplot(n_rows, n_cols, index + 1)
    plt.imshow(X_train[index], cmap="binary", interpolation="nearest")
    plt.axis('off')
    plt.title(y_train[index], fontsize=12)
plt.subplots_adjust(wspace=0.2, hspace=0.5)
plt.show()
간단한 밀집 신경망을 만들고 최적의 학습률을 찾아 보겠습니다. 반복마다 학습률을 증가시키기 위해 콜백을 사용합니다. 이 콜백은 반복마다 학습률과 손실을 기록합니다:
K = keras.backend

class ExponentialLearningRate(keras.callbacks.Callback):
    """Grow the optimizer's learning rate by `factor` after every batch.

    Records the learning rate and training loss at the end of each batch
    (in `self.rates` and `self.losses`) so they can be plotted afterwards
    to locate the learning rate at which the loss starts to shoot up.
    """
    def __init__(self, factor):
        # Fix: call the base-class initializer so Keras can attach
        # bookkeeping attributes (e.g. self.model) to this callback.
        super().__init__()
        self.factor = factor   # per-batch multiplicative growth of the learning rate
        self.rates = []        # learning rate observed at the end of each batch
        self.losses = []       # training loss observed at the end of each batch
    def on_batch_end(self, batch, logs):
        # Record the current state, then bump the learning rate for the next batch.
        self.rates.append(K.get_value(self.model.optimizer.lr))
        self.losses.append(logs["loss"])
        K.set_value(self.model.optimizer.lr, self.model.optimizer.lr * self.factor)
# Reset the graph and fix the random seeds so runs are reproducible.
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)

# Same 300-100 dense architecture as before, built layer by layer.
model = keras.models.Sequential()
model.add(keras.layers.Flatten(input_shape=[28, 28]))
model.add(keras.layers.Dense(300, activation="relu"))
model.add(keras.layers.Dense(100, activation="relu"))
model.add(keras.layers.Dense(10, activation="softmax"))
작은 학습률 1e-3에서 시작하여 반복마다 0.5%씩 증가합니다:
# Compile with plain SGD at a small starting rate of 1e-3; the callback
# below multiplies the rate by 1.005 after every batch during the sweep.
sweep_optimizer = keras.optimizers.SGD(learning_rate=1e-3)
model.compile(optimizer=sweep_optimizer,
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
expon_lr = ExponentialLearningRate(factor=1.005)
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
모델을 1 에포크만 훈련해 보죠:
# Train for a single epoch while the callback sweeps the learning rate upward,
# recording (rate, loss) pairs for the plot below.
history = model.fit(X_train, y_train, epochs=1,
validation_data=(X_valid, y_valid),
callbacks=[expon_lr])
1/1719 [..............................] - ETA: 6:39 - loss: 2.5649 - accuracy: 0.0625WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0019s vs `on_train_batch_end` time: 0.0023s). Check your callbacks. 1719/1719 [==============================] - 6s 3ms/step - loss: nan - accuracy: 0.5935 - val_loss: nan - val_accuracy: 0.0958
학습률에 대한 함수로 손실을 그릴 수 있습니다:
# Plot the recorded loss as a function of learning rate (log-scale x axis),
# with a horizontal line marking the minimum loss reached.
rates = expon_lr.rates
losses = expon_lr.losses
plt.plot(rates, losses)
plt.gca().set_xscale('log')
plt.hlines(min(losses), min(rates), max(rates))
plt.axis([min(rates), max(rates), 0, losses[0]])
plt.grid()
plt.xlabel("Learning rate")
plt.ylabel("Loss")
Text(0, 0.5, 'Loss')
학습률이 6e-1을 지날 때 손실이 갑자기 솟구치기 때문에 3e-1을 학습률로 사용하겠습니다:
# Reset and re-seed so this training run is independent of the lr sweep.
keras.backend.clear_session()
np.random.seed(42)
tf.random.set_seed(42)

# Rebuild the same network from scratch and train it at a fixed learning
# rate of 3e-1 (half the ~6e-1 point where the loss started to explode).
layer_stack = [
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.Dense(300, activation="relu"),
    keras.layers.Dense(100, activation="relu"),
    keras.layers.Dense(10, activation="softmax"),
]
model = keras.models.Sequential(layer_stack)
model.compile(loss="sparse_categorical_crossentropy",
              optimizer=keras.optimizers.SGD(learning_rate=3e-1),
              metrics=["accuracy"])
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/optimizer_v2.py:356: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. "The `lr` argument is deprecated, use `learning_rate` instead.")
run_index = 1  # increase this on every run so each run gets its own log directory
run_logdir = os.path.join(os.curdir, "my_mnist_logs", f"run_{run_index:03d}")
run_logdir
'./my_mnist_logs/run_001'
# Stop training when val_loss has not improved for 20 consecutive epochs,
early_stopping_cb = keras.callbacks.EarlyStopping(patience=20)
# keep only the best model (lowest val_loss) on disk,
checkpoint_cb = keras.callbacks.ModelCheckpoint("my_mnist_model.h5", save_best_only=True)
# and log training curves to run_logdir for TensorBoard.
tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)
# Train with checkpointing, early stopping, and TensorBoard logging enabled;
# early stopping will normally end the run well before 100 epochs.
history = model.fit(X_train, y_train, epochs=100,
validation_data=(X_valid, y_valid),
callbacks=[checkpoint_cb, early_stopping_cb, tensorboard_cb])
Epoch 1/100 1719/1719 [==============================] - 5s 3ms/step - loss: 0.2363 - accuracy: 0.9264 - val_loss: 0.1022 - val_accuracy: 0.9696 Epoch 2/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0944 - accuracy: 0.9703 - val_loss: 0.0907 - val_accuracy: 0.9730 Epoch 3/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0676 - accuracy: 0.9785 - val_loss: 0.0840 - val_accuracy: 0.9754 Epoch 4/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0482 - accuracy: 0.9844 - val_loss: 0.0695 - val_accuracy: 0.9812 Epoch 5/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0342 - accuracy: 0.9890 - val_loss: 0.0813 - val_accuracy: 0.9808 Epoch 6/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0303 - accuracy: 0.9898 - val_loss: 0.0692 - val_accuracy: 0.9828 Epoch 7/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0237 - accuracy: 0.9920 - val_loss: 0.0810 - val_accuracy: 0.9812 Epoch 8/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0193 - accuracy: 0.9935 - val_loss: 0.0863 - val_accuracy: 0.9764 Epoch 9/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0185 - accuracy: 0.9937 - val_loss: 0.0949 - val_accuracy: 0.9798 Epoch 10/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0141 - accuracy: 0.9951 - val_loss: 0.0842 - val_accuracy: 0.9828 Epoch 11/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0144 - accuracy: 0.9955 - val_loss: 0.1024 - val_accuracy: 0.9798 Epoch 12/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0081 - accuracy: 0.9973 - val_loss: 0.1284 - val_accuracy: 0.9728 Epoch 13/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0078 - accuracy: 0.9976 - val_loss: 0.1002 - val_accuracy: 0.9820 Epoch 14/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0129 - accuracy: 0.9959 - 
val_loss: 0.0897 - val_accuracy: 0.9836 Epoch 15/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0083 - accuracy: 0.9974 - val_loss: 0.0812 - val_accuracy: 0.9858 Epoch 16/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0095 - accuracy: 0.9967 - val_loss: 0.1058 - val_accuracy: 0.9814 Epoch 17/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0080 - accuracy: 0.9974 - val_loss: 0.0867 - val_accuracy: 0.9862 Epoch 18/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0027 - accuracy: 0.9992 - val_loss: 0.0997 - val_accuracy: 0.9850 Epoch 19/100 1719/1719 [==============================] - 3s 2ms/step - loss: 0.0043 - accuracy: 0.9987 - val_loss: 0.0981 - val_accuracy: 0.9842 Epoch 20/100 1719/1719 [==============================] - 4s 2ms/step - loss: 0.0016 - accuracy: 0.9995 - val_loss: 0.0913 - val_accuracy: 0.9856 Epoch 21/100 1719/1719 [==============================] - 3s 2ms/step - loss: 3.5843e-04 - accuracy: 0.9999 - val_loss: 0.0860 - val_accuracy: 0.9862 Epoch 22/100 1719/1719 [==============================] - 4s 2ms/step - loss: 1.0448e-04 - accuracy: 1.0000 - val_loss: 0.0844 - val_accuracy: 0.9878 Epoch 23/100 1719/1719 [==============================] - 3s 2ms/step - loss: 5.7757e-05 - accuracy: 1.0000 - val_loss: 0.0855 - val_accuracy: 0.9876 Epoch 24/100 1719/1719 [==============================] - 3s 2ms/step - loss: 4.7349e-05 - accuracy: 1.0000 - val_loss: 0.0859 - val_accuracy: 0.9876 Epoch 25/100 1719/1719 [==============================] - 4s 2ms/step - loss: 4.1029e-05 - accuracy: 1.0000 - val_loss: 0.0863 - val_accuracy: 0.9876 Epoch 26/100 1719/1719 [==============================] - 4s 2ms/step - loss: 3.6531e-05 - accuracy: 1.0000 - val_loss: 0.0869 - val_accuracy: 0.9874
# Restore the checkpointed best model and measure its test-set loss/accuracy.
model = keras.models.load_model("my_mnist_model.h5") # rollback to best model
model.evaluate(X_test, y_test)
313/313 [==============================] - 1s 2ms/step - loss: 0.0716 - accuracy: 0.9792
[0.07158412784337997, 0.979200005531311]
98% 정확도를 얻었습니다. 마지막으로 텐서보드를 사용해 학습 곡선을 살펴보겠습니다:
%tensorboard --logdir=./my_mnist_logs --port=6007