import numpy as np
import scipy.misc
import glob
import keras
from keras import backend as K
from keras.layers import Conv2D, MaxPooling2D, Input
from keras.layers import Dense, Dropout, Flatten
from keras.layers.merge import Concatenate
from keras.models import Sequential, Model
from keras.preprocessing.image import ImageDataGenerator
from sklearn.model_selection import train_test_split
from io import BytesIO
from IPython import display
from PIL import Image
def display_image(arr):
    """Render a numeric array as an inline PNG image in the notebook."""
    buf = BytesIO()
    img = Image.fromarray(np.uint8(arr))
    img.save(buf, format='png')
    display.display(display.Image(data=buf.getvalue(), format='png', embed=True))
# Shogi piece label names; total classes = upright + 180-degree-rotated
# variants of each piece, plus one extra class (empty square).
labels = ['gyoku', 'ou', 'kin', 'gin', 'kei', 'kyo', 'kaku', 'hi', 'fu', 'narigin', 'narikei', 'narikyo', 'uma', 'ryu', 'to']
num_classes = 2 * len(labels) + 1
# input image dimensions
img_rows, img_cols = 64, 64
input_shape = (img_rows, img_cols, 1)
# Load every .npz series file (sorted for deterministic order); each
# archive holds an 'imgs' array and a parallel 'labels' array.
series_imgs = []
series_labels = []
for npz_path in sorted(glob.glob("../data/koma/*.npz")):
    archive = np.load(npz_path)
    print(npz_path)
    series_imgs.append(archive['imgs'].astype(np.float32))
    series_labels.append(archive['labels'])
num_series = len(series_imgs)
Using TensorFlow backend.
../data/koma/01.npz ../data/koma/02.npz ../data/koma/03.npz ../data/koma/04.npz ../data/koma/05.npz ../data/koma/06.npz ../data/koma/07.npz ../data/koma/08.npz ../data/koma/09.npz ../data/koma/10.npz ../data/koma/11.npz ../data/koma/12.npz ../data/koma/13.npz ../data/koma/14.npz ../data/koma/15.npz ../data/koma/16.npz ../data/koma/17.npz ../data/koma/18.npz ../data/koma/19.npz ../data/koma/20.npz ../data/koma/21.npz ../data/koma/22.npz ../data/koma/23.npz ../data/koma/24.npz ../data/koma/25.npz ../data/koma/26.npz ../data/koma/27.npz
# load all series into two flat arrays.
# y_all columns: [class index, series index]; the series index is kept so
# a per-series train/test split is possible (see commented-out split below).
# PERF: previously x_all/y_all grew by repeated np.r_ concatenation inside
# the loop (O(n^2) copying); now chunks are collected and concatenated once.
img_chunks = []
label_chunks = []
for i in range(num_series):
    imgs = series_imgs[i]
    # map each label string to its class index, tagged with the series id
    indices = [labels.index(label) for label in series_labels[i]]
    img_chunks.append(imgs)
    label_chunks.append(np.c_[indices, np.full(len(imgs), i)])
if img_chunks:
    x_all = np.concatenate(img_chunks)
    y_all = np.concatenate(label_chunks)
else:
    x_all = np.empty((0, img_rows, img_cols))
    y_all = np.empty((0, 2), np.int32)
# rot 180: rotated copies get class indices shifted up by len(labels)
x_all = np.r_[x_all, np.rot90(x_all, 2, (1, 2))]
y_rot = np.copy(y_all)
y_rot[:, 0] += len(labels)
y_all = np.r_[y_all, y_rot]
# space (empty square) images: single extra class index len(labels) * 2,
# tagged with a sentinel series id of num_series
space_imgs = np.load('../learn/space/space.npz')['imgs'].astype(np.float32)
x_all = np.r_[x_all, space_imgs]
y_space = np.full((len(space_imgs), 2), [len(labels) * 2, num_series])
y_all = np.r_[y_all, y_space]
print(x_all.shape)
print(y_all.shape)
# reshape to TF channels-last layout and scale pixel values to [0, 1]
x_all = x_all.reshape(x_all.shape[0], img_rows, img_cols, 1)
x_all = x_all.astype(np.float32)
x_all /= 255
(760, 64, 64) (760, 2)
# Random 95/5 train/test split. (An alternative, commented out in the
# original, was to hold out one whole series as the test set by filtering
# on y_all[:, 1].)
x_train, x_test, y_train, y_test = train_test_split(x_all, y_all, test_size=0.05)
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train[:, 0], num_classes)
y_test = keras.utils.to_categorical(y_test[:, 0], num_classes)
# Augmentation: small random rotations and zooms during training.
datagen = ImageDataGenerator(rotation_range=15, zoom_range=0.25)
datagen.fit(x_train)
# CNN classifier: three conv/pool blocks (32 -> 64 -> 128 filters) with
# increasing dropout, then two 256-unit dense layers and a softmax head
# over num_classes (upright + rotated pieces + empty square).
model = Sequential()
# FIX: input_shape belongs only on the first layer; it was redundantly
# (and misleadingly) repeated on the later Conv2D layers, where Keras
# silently ignores it.
model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.2))
model.add(Conv2D(64, kernel_size=(3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.3))
model.add(Conv2D(128, kernel_size=(3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.4))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.summary()
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_1 (Conv2D) (None, 62, 62, 32) 320 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 31, 31, 32) 0 _________________________________________________________________ dropout_1 (Dropout) (None, 31, 31, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 29, 29, 64) 18496 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 14, 14, 64) 0 _________________________________________________________________ dropout_2 (Dropout) (None, 14, 14, 64) 0 _________________________________________________________________ conv2d_3 (Conv2D) (None, 12, 12, 128) 73856 _________________________________________________________________ max_pooling2d_3 (MaxPooling2 (None, 6, 6, 128) 0 _________________________________________________________________ dropout_3 (Dropout) (None, 6, 6, 128) 0 _________________________________________________________________ flatten_1 (Flatten) (None, 4608) 0 _________________________________________________________________ dense_1 (Dense) (None, 256) 1179904 _________________________________________________________________ dropout_4 (Dropout) (None, 256) 0 _________________________________________________________________ dense_2 (Dense) (None, 256) 65792 _________________________________________________________________ dropout_5 (Dropout) (None, 256) 0 _________________________________________________________________ dense_3 (Dense) (None, 31) 7967 ================================================================= Total params: 1,346,335 Trainable params: 1,346,335 Non-trainable params: 0 _________________________________________________________________
model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])
# BUG FIX: steps_per_epoch was x_train.shape[0] (the SAMPLE count), a
# Keras 1 -> 2 porting mistake; in Keras 2 fit_generator counts BATCHES
# per epoch, so each "epoch" previously saw ~batch_size x the training
# set. Use ceil(samples / batch_size) so one epoch is one pass.
batch_size = 16
steps = (x_train.shape[0] + batch_size - 1) // batch_size
model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                    steps_per_epoch=steps,
                    epochs=80, verbose=1, validation_data=(x_test, y_test))
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
Epoch 1/80 722/722 [==============================] - 12s - loss: 3.4137 - acc: 0.0367 - val_loss: 3.2081 - val_acc: 0.1316 Epoch 2/80 722/722 [==============================] - 10s - loss: 2.7905 - acc: 0.1454 - val_loss: 2.3009 - val_acc: 0.2368 Epoch 3/80 722/722 [==============================] - 10s - loss: 2.1942 - acc: 0.2745 - val_loss: 1.8328 - val_acc: 0.4211 Epoch 4/80 722/722 [==============================] - 10s - loss: 1.8364 - acc: 0.3715 - val_loss: 1.4315 - val_acc: 0.5789 Epoch 5/80 722/722 [==============================] - 10s - loss: 1.5310 - acc: 0.4745 - val_loss: 1.5181 - val_acc: 0.5789 Epoch 6/80 722/722 [==============================] - 10s - loss: 1.2807 - acc: 0.5492 - val_loss: 1.0455 - val_acc: 0.6053 Epoch 7/80 722/722 [==============================] - 10s - loss: 1.0845 - acc: 0.6184 - val_loss: 1.0110 - val_acc: 0.6053 Epoch 8/80 722/722 [==============================] - 10s - loss: 0.9034 - acc: 0.6813 - val_loss: 0.8776 - val_acc: 0.6579 Epoch 9/80 722/722 [==============================] - 10s - loss: 0.7880 - acc: 0.7196 - val_loss: 0.8537 - val_acc: 0.6842 Epoch 10/80 722/722 [==============================] - 10s - loss: 0.6903 - acc: 0.7525 - val_loss: 0.7707 - val_acc: 0.7105 Epoch 11/80 722/722 [==============================] - 10s - loss: 0.6127 - acc: 0.7794 - val_loss: 0.6713 - val_acc: 0.7105 Epoch 12/80 722/722 [==============================] - 10s - loss: 0.5556 - acc: 0.7982 - val_loss: 0.7441 - val_acc: 0.7105 Epoch 13/80 722/722 [==============================] - 10s - loss: 0.4840 - acc: 0.8263 - val_loss: 0.7451 - val_acc: 0.7368 Epoch 14/80 722/722 [==============================] - 10s - loss: 0.4563 - acc: 0.8373 - val_loss: 0.7488 - val_acc: 0.7105 Epoch 15/80 722/722 [==============================] - 10s - loss: 0.4087 - acc: 0.8468 - val_loss: 0.7775 - val_acc: 0.7368 Epoch 16/80 722/722 [==============================] - 10s - loss: 0.3906 - acc: 0.8547 - val_loss: 0.7144 - val_acc: 0.7632 Epoch 
17/80 722/722 [==============================] - 10s - loss: 0.3615 - acc: 0.8680 - val_loss: 0.7146 - val_acc: 0.7368 Epoch 18/80 722/722 [==============================] - 10s - loss: 0.3323 - acc: 0.8801 - val_loss: 0.7592 - val_acc: 0.7368 Epoch 19/80 722/722 [==============================] - 10s - loss: 0.3287 - acc: 0.8850 - val_loss: 0.7124 - val_acc: 0.7632 Epoch 20/80 722/722 [==============================] - 10s - loss: 0.2867 - acc: 0.8953 - val_loss: 0.6493 - val_acc: 0.7632 Epoch 21/80 722/722 [==============================] - 10s - loss: 0.2854 - acc: 0.8993 - val_loss: 0.7811 - val_acc: 0.7632 Epoch 22/80 722/722 [==============================] - 10s - loss: 0.2598 - acc: 0.9052 - val_loss: 0.6462 - val_acc: 0.7632 Epoch 23/80 722/722 [==============================] - 10s - loss: 0.2523 - acc: 0.9072 - val_loss: 0.6441 - val_acc: 0.7895 Epoch 24/80 722/722 [==============================] - 10s - loss: 0.2312 - acc: 0.9187 - val_loss: 0.5554 - val_acc: 0.8158 Epoch 25/80 722/722 [==============================] - 10s - loss: 0.2338 - acc: 0.9219 - val_loss: 0.5886 - val_acc: 0.8421 Epoch 26/80 722/722 [==============================] - 10s - loss: 0.2214 - acc: 0.9266 - val_loss: 0.6649 - val_acc: 0.7895 Epoch 27/80 722/722 [==============================] - 10s - loss: 0.2079 - acc: 0.9258 - val_loss: 0.6159 - val_acc: 0.8158 Epoch 28/80 722/722 [==============================] - 10s - loss: 0.1968 - acc: 0.9317 - val_loss: 0.5634 - val_acc: 0.8158 Epoch 29/80 722/722 [==============================] - 10s - loss: 0.1937 - acc: 0.9371 - val_loss: 0.5822 - val_acc: 0.8421 Epoch 30/80 722/722 [==============================] - 10s - loss: 0.2074 - acc: 0.9292 - val_loss: 0.5156 - val_acc: 0.8421 Epoch 31/80 722/722 [==============================] - 10s - loss: 0.1885 - acc: 0.9386 - val_loss: 0.6156 - val_acc: 0.8158 Epoch 32/80 722/722 [==============================] - 10s - loss: 0.1768 - acc: 0.9407 - val_loss: 0.5264 - val_acc: 0.8421 Epoch 
33/80 722/722 [==============================] - 10s - loss: 0.1713 - acc: 0.9443 - val_loss: 0.5200 - val_acc: 0.8158 Epoch 34/80 722/722 [==============================] - 10s - loss: 0.1700 - acc: 0.9432 - val_loss: 0.5993 - val_acc: 0.8421 Epoch 35/80 722/722 [==============================] - 10s - loss: 0.1612 - acc: 0.9456 - val_loss: 0.5267 - val_acc: 0.8158 Epoch 36/80 722/722 [==============================] - 10s - loss: 0.1513 - acc: 0.9510 - val_loss: 0.5298 - val_acc: 0.8421 Epoch 37/80 722/722 [==============================] - 10s - loss: 0.1565 - acc: 0.9496 - val_loss: 0.5455 - val_acc: 0.8421 Epoch 38/80 722/722 [==============================] - 10s - loss: 0.1459 - acc: 0.9534 - val_loss: 0.4869 - val_acc: 0.8684 Epoch 39/80 722/722 [==============================] - 10s - loss: 0.1432 - acc: 0.9540 - val_loss: 0.5208 - val_acc: 0.8684 Epoch 40/80 722/722 [==============================] - 10s - loss: 0.1358 - acc: 0.9558 - val_loss: 0.5364 - val_acc: 0.8421 Epoch 41/80 722/722 [==============================] - 10s - loss: 0.1431 - acc: 0.9534 - val_loss: 0.5587 - val_acc: 0.8421 Epoch 42/80 722/722 [==============================] - 10s - loss: 0.1363 - acc: 0.9550 - val_loss: 0.5734 - val_acc: 0.8421 Epoch 43/80 722/722 [==============================] - 10s - loss: 0.1349 - acc: 0.9569 - val_loss: 0.5281 - val_acc: 0.8421 Epoch 44/80 722/722 [==============================] - 10s - loss: 0.1283 - acc: 0.9600 - val_loss: 0.4967 - val_acc: 0.8421 Epoch 45/80 722/722 [==============================] - 10s - loss: 0.1295 - acc: 0.9572 - val_loss: 0.5608 - val_acc: 0.8684 Epoch 46/80 722/722 [==============================] - 10s - loss: 0.1317 - acc: 0.9565 - val_loss: 0.5007 - val_acc: 0.8947 Epoch 47/80 722/722 [==============================] - 10s - loss: 0.1306 - acc: 0.9594 - val_loss: 0.4285 - val_acc: 0.8947 Epoch 48/80 722/722 [==============================] - 10s - loss: 0.1291 - acc: 0.9619 - val_loss: 0.5611 - val_acc: 0.8421 Epoch 
49/80 722/722 [==============================] - 10s - loss: 0.1269 - acc: 0.9590 - val_loss: 0.3912 - val_acc: 0.8947 Epoch 50/80 722/722 [==============================] - 10s - loss: 0.1313 - acc: 0.9598 - val_loss: 0.4277 - val_acc: 0.8684 Epoch 51/80 722/722 [==============================] - 10s - loss: 0.1233 - acc: 0.9610 - val_loss: 0.4884 - val_acc: 0.8947 Epoch 52/80 722/722 [==============================] - 10s - loss: 0.1216 - acc: 0.9630 - val_loss: 0.4594 - val_acc: 0.8684 Epoch 53/80 722/722 [==============================] - 10s - loss: 0.1185 - acc: 0.9635 - val_loss: 0.4171 - val_acc: 0.8947 Epoch 54/80 722/722 [==============================] - 10s - loss: 0.1219 - acc: 0.9613 - val_loss: 0.3912 - val_acc: 0.8947 Epoch 55/80 722/722 [==============================] - 10s - loss: 0.1223 - acc: 0.9623 - val_loss: 0.4152 - val_acc: 0.8684 Epoch 56/80 722/722 [==============================] - 10s - loss: 0.1293 - acc: 0.9592 - val_loss: 0.3840 - val_acc: 0.8947 Epoch 57/80 722/722 [==============================] - 10s - loss: 0.1192 - acc: 0.9630 - val_loss: 0.3830 - val_acc: 0.8947 Epoch 58/80 722/722 [==============================] - 10s - loss: 0.1101 - acc: 0.9656 - val_loss: 0.5646 - val_acc: 0.8421 Epoch 59/80 722/722 [==============================] - 10s - loss: 0.1231 - acc: 0.9633 - val_loss: 0.4475 - val_acc: 0.8684 Epoch 60/80 722/722 [==============================] - 10s - loss: 0.1254 - acc: 0.9631 - val_loss: 0.3646 - val_acc: 0.8684 Epoch 61/80 722/722 [==============================] - 10s - loss: 0.1271 - acc: 0.9605 - val_loss: 0.4975 - val_acc: 0.8684 Epoch 62/80 722/722 [==============================] - 10s - loss: 0.1194 - acc: 0.9654 - val_loss: 0.4843 - val_acc: 0.8684 Epoch 63/80 722/722 [==============================] - 10s - loss: 0.1197 - acc: 0.9657 - val_loss: 0.3735 - val_acc: 0.8947 Epoch 64/80 722/722 [==============================] - 10s - loss: 0.1212 - acc: 0.9651 - val_loss: 0.5201 - val_acc: 0.8684 Epoch 
65/80 722/722 [==============================] - 10s - loss: 0.1129 - acc: 0.9661 - val_loss: 0.4702 - val_acc: 0.8421 Epoch 66/80 722/722 [==============================] - 10s - loss: 0.1212 - acc: 0.9641 - val_loss: 0.3151 - val_acc: 0.8421 Epoch 67/80 722/722 [==============================] - 10s - loss: 0.1267 - acc: 0.9636 - val_loss: 0.3659 - val_acc: 0.8684 Epoch 68/80 722/722 [==============================] - 10s - loss: 0.1092 - acc: 0.9655 - val_loss: 0.5439 - val_acc: 0.8421 Epoch 69/80 722/722 [==============================] - 10s - loss: 0.1206 - acc: 0.9629 - val_loss: 0.2749 - val_acc: 0.8947 Epoch 70/80 722/722 [==============================] - 10s - loss: 0.1225 - acc: 0.9650 - val_loss: 0.3803 - val_acc: 0.8947 Epoch 71/80 722/722 [==============================] - 10s - loss: 0.1179 - acc: 0.9662 - val_loss: 0.4362 - val_acc: 0.8947 Epoch 72/80 722/722 [==============================] - 10s - loss: 0.1168 - acc: 0.9657 - val_loss: 0.4263 - val_acc: 0.8684 Epoch 73/80 722/722 [==============================] - 10s - loss: 0.1250 - acc: 0.9636 - val_loss: 0.3945 - val_acc: 0.8684 Epoch 74/80 722/722 [==============================] - 10s - loss: 0.1283 - acc: 0.9655 - val_loss: 0.3728 - val_acc: 0.8684 Epoch 75/80 722/722 [==============================] - 10s - loss: 0.1318 - acc: 0.9659 - val_loss: 0.4303 - val_acc: 0.8684 Epoch 76/80 722/722 [==============================] - 10s - loss: 0.1259 - acc: 0.9651 - val_loss: 0.3474 - val_acc: 0.8684 Epoch 77/80 722/722 [==============================] - 10s - loss: 0.1261 - acc: 0.9630 - val_loss: 0.5118 - val_acc: 0.8684 Epoch 78/80 722/722 [==============================] - 10s - loss: 0.1252 - acc: 0.9655 - val_loss: 0.4758 - val_acc: 0.8684 Epoch 79/80 722/722 [==============================] - 10s - loss: 0.1238 - acc: 0.9633 - val_loss: 0.3358 - val_acc: 0.8947 Epoch 80/80 722/722 [==============================] - 10s - loss: 0.1321 - acc: 0.9629 - val_loss: 0.3712 - val_acc: 0.8684 Test 
loss: 0.371219977344 Test accuracy: 0.868421058906
pred_res = model.predict(x_test)
# predicted class = argmax over the softmax outputs, per row
res = np.argmax(pred_res, axis=1)
# true class = position of the 1 in each one-hot row (argmax on one-hot)
ans = np.argmax(y_test, axis=1)
print(res)
print(ans)
# indices of correctly and incorrectly classified test samples
correct = np.where(res == ans)[0]
errors = np.where(res != ans)[0]
print(errors)
[29 25 29 5 27 20 15 3 13 26 23 12 25 12 30 16 18 2 7 7 20 25 5 30 25 22 7 1 7 27 11 1 6 6 0 29 17 9] [29 25 29 13 27 20 15 3 13 26 23 12 25 8 30 15 18 2 7 7 20 26 5 30 25 22 7 1 7 27 11 1 6 6 0 29 15 9] [ 3 13 15 21 36]
# Show each misclassified test image with its predicted label.
# BUG FIX: the original two-way branch sent every class >= len(labels)
# through labels[res[e] - len(labels)], so the empty-square class
# (res[e] == len(labels) * 2 == 30) would raise IndexError (labels[15]).
# Now handled with the same three-way logic as the correct-sample loop.
for e in errors:
    display_image(np.uint8(x_test[e] * 255)[:, :, 0])
    if res[e] < len(labels):
        # upright piece
        print(labels[res[e]])
    elif res[e] < len(labels) * 2:
        # rotated (opponent's) piece: "p" marker line, then base label
        print("p")
        print(labels[res[e] - len(labels)])
    else:
        # empty square class
        print(" ")
kyo
uma
p ou
p narikei
p kin
# Show each correctly classified test image with its label: plain name
# for upright pieces, a "*" prefix for the rotated variants, and a blank
# line for the empty-square class.
for idx in correct:
    display_image(np.uint8(x_test[idx] * 255)[:, :, 0])
    cls = res[idx]
    if cls < len(labels):
        print(labels[cls])
    elif cls < 2 * len(labels):
        print("*" + labels[cls - len(labels)])
    else:
        print(" ")
*to
*narikei
*to
*uma
*kyo
*gyoku
gin
ryu
*narikyo
*fu
uma
*narikei
*gin
kin
hi
hi
*kyo
kyo
*narikei
*hi
hi
ou
hi
*uma
narikyo
ou
kaku
kaku
gyoku
*to
narigin