#KerasTuner requires Python 3.6+ and TensorFlow 2.0+.
!pip install keras-tuner --upgrade
Requirement already satisfied: keras-tuner in /usr/local/lib/python3.10/dist-packages (1.4.6) Requirement already satisfied: keras in /usr/local/lib/python3.10/dist-packages (from keras-tuner) (2.14.0) Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from keras-tuner) (23.2) Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from keras-tuner) (2.31.0) Requirement already satisfied: kt-legacy in /usr/local/lib/python3.10/dist-packages (from keras-tuner) (1.0.5) Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->keras-tuner) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->keras-tuner) (3.4) Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->keras-tuner) (2.0.7) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->keras-tuner) (2023.7.22)
from tensorflow import keras
import keras_tuner
import numpy as np
# Load MNIST and hold out the last 10,000 training examples as a validation split.
(x, y), (x_test, y_test) = keras.datasets.mnist.load_data()

num_val = 10000
x_train, x_val = x[:-num_val], x[-num_val:]
y_train, y_val = y[:-num_val], y[-num_val:]

# Add a trailing channel axis and rescale pixel values from [0, 255] to [0, 1].
def _prep(images):
    return np.expand_dims(images, -1).astype("float32") / 255.0

x_train = _prep(x_train)
x_val = _prep(x_val)
x_test = _prep(x_test)
The call `hp.Int('units', min_value=32, max_value=512, step=32)` defines an integer hyperparameter named `units` whose range is from 32 to 512 inclusive.
When sampling from it, the minimum step for walking through the interval is 32.
def build_model(hp):
    """Build and compile a tunable MLP classifier for MNIST.

    Args:
        hp: The hyperparameter container supplied by KerasTuner; used to
            declare and sample the search space.

    Returns:
        A compiled `keras.Sequential` model with one tunable hidden layer
        and a 10-way softmax output.
    """
    # Declare the search space up front: hidden-layer width in
    # {32, 64, ..., 512} and a choice of activation function.
    units = hp.Int("units", min_value=32, max_value=512, step=32)
    activation = hp.Choice("activation", ["relu", "LeakyReLU"])

    model = keras.Sequential(
        [
            keras.layers.Flatten(),
            keras.layers.Dense(units=units, activation=activation),
            keras.layers.Dense(10, activation="softmax"),
        ]
    )
    model.compile(
        optimizer="adam",
        loss="SparseCategoricalCrossentropy",
        metrics=["accuracy"],
    )
    return model
# Set up a random-search tuner over the space declared in build_model,
# ranking trials by validation accuracy and capping the search at 5 trials.
tuner = keras_tuner.RandomSearch(
    hypermodel=build_model,
    objective="val_accuracy",
    max_trials=5,
)
# Print the declared hyperparameter search space for inspection.
tuner.search_space_summary()
Search space summary Default search space size: 2 units (Int) {'default': None, 'conditions': [], 'min_value': 32, 'max_value': 512, 'step': 32, 'sampling': 'linear'} activation (Choice) {'default': 'relu', 'conditions': [], 'values': ['relu', 'LeakyReLU'], 'ordered': False}
# Run the search: each trial builds a fresh model, trains it for 3 epochs,
# and scores it on the held-out validation split.
tuner.search(x_train, y_train, epochs=3, validation_data=(x_val, y_val))
Trial 5 Complete [00h 00m 42s] val_accuracy: 0.9736999869346619 Best val_accuracy So Far: 0.9736999869346619 Total elapsed time: 00h 02m 37s
# Take the highest-scoring model from the finished search.
best_model = tuner.get_best_models()[0]
# Print a ranked summary of the completed trials and their hyperparameters.
tuner.results_summary()
Results summary Results in ./untitled_project Showing 10 best trials Objective(name="val_accuracy", direction="max") Trial 4 summary Hyperparameters: units: 224 activation: relu Score: 0.9736999869346619 Trial 0 summary Hyperparameters: units: 288 activation: LeakyReLU Score: 0.9679999947547913 Trial 3 summary Hyperparameters: units: 448 activation: LeakyReLU Score: 0.9668999910354614 Trial 1 summary Hyperparameters: units: 352 activation: LeakyReLU Score: 0.9656000137329102 Trial 2 summary Hyperparameters: units: 32 activation: LeakyReLU Score: 0.9531000256538391
# Final check on the untouched test set; returns [loss, accuracy].
best_model.evaluate(x_test, y_test)
313/313 [==============================] - 1s 2ms/step - loss: 0.0893 - accuracy: 0.9718
[0.08932860195636749, 0.9718000292778015]