In [1]:
from keras.layers.core import Dense, Activation
from keras.models import Sequential
from keras.optimizers import Adam
from sklearn import datasets, preprocessing
import numpy as np
Using TensorFlow backend.

MNIST - Keras (Backend: TensorFlow)

Compare this with the following TensorFlow tutorial:
http://www.tensorflow.org/tutorials/mnist/beginners/index.html

MNIST Data

In [2]:
# Prepare the dataset
mnist = datasets.fetch_mldata('MNIST original')

# Scale pixel values from [0, 255] to [0, 1]
X = mnist.data.astype(float)
X /= 255

# One-hot encode the digit labels (0-9)
y = preprocessing.LabelBinarizer().fit_transform(mnist.target)

# Split the data into training and test sets
train_size = 60000

X_train, X_test = np.split(X, [train_size])
y_train, y_test = np.split(y, [train_size])
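
As a quick sanity check (not part of the original run), the shapes of the split arrays and the one-hot labels can be inspected with the variables defined above; the expected values in the comments are assumptions based on the standard MNIST split.

print(X_train.shape)    # expected: (60000, 784)
print(X_test.shape)     # expected: (10000, 784)
print(y_train.shape)    # expected: (60000, 10) after one-hot encoding
print(y_train[0])       # a single one-hot row, e.g. [1 0 0 0 0 0 0 0 0 0]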

Implementing the Regression

In [3]:
# Softmax regression: a single dense layer mapping 784 pixel inputs to 10 class scores
model = Sequential()
model.add(Dense(10, input_shape=(784,), init='zero'))
model.add(Activation("softmax"))
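
As in the TensorFlow tutorial, this model computes y = softmax(W·x + b), with W of shape (784, 10) and b of shape (10,), i.e. 7,850 trainable parameters. A minimal numpy sketch of the same forward pass (illustrative only; W, b and the function names are assumptions, not Keras internals):

import numpy as np

W = np.zeros((784, 10))            # weight matrix, zero-initialised like init='zero'
b = np.zeros(10)                   # bias vector

def softmax(z):
    e = np.exp(z - z.max())        # subtract the max for numerical stability
    return e / e.sum()

def forward(x):
    # x: one flattened 28x28 image, shape (784,)
    return softmax(np.dot(x, W) + b)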

Training

In [4]:
# Compile the model
model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01))
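
The categorical cross-entropy loss minimised here is -sum_k t_k * log(p_k), with t the one-hot target and p the predicted probabilities. A small numpy sketch (illustrative only; the function name is an assumption, not the Keras implementation):

def categorical_crossentropy(t, p, eps=1e-7):
    # t: one-hot target vector, p: predicted probability vector
    return -np.sum(t * np.log(np.clip(p, eps, 1.0)))

t_example = np.array([0, 0, 1, 0, 0, 0, 0, 0, 0, 0])   # true class is "2"
p_example = np.full(10, 0.1)                            # uniform prediction over 10 classes
print(categorical_crossentropy(t_example, p_example))   # -log(0.1), roughly 2.30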
In [5]:
model.fit(X_train, y_train, nb_epoch=10, batch_size=100)
Epoch 1/10
60000/60000 [==============================] - 4s - loss: 0.3547     
Epoch 2/10
60000/60000 [==============================] - 3s - loss: 0.2955     
Epoch 3/10
60000/60000 [==============================] - 3s - loss: 0.2883     
Epoch 4/10
60000/60000 [==============================] - 3s - loss: 0.2847     
Epoch 5/10
60000/60000 [==============================] - 3s - loss: 0.2812     
Epoch 6/10
60000/60000 [==============================] - 3s - loss: 0.2777     
Epoch 7/10
60000/60000 [==============================] - 3s - loss: 0.2739     
Epoch 8/10
60000/60000 [==============================] - 4s - loss: 0.2730     
Epoch 9/10
60000/60000 [==============================] - 4s - loss: 0.2691     
Epoch 10/10
60000/60000 [==============================] - 4s - loss: 0.2715     
Out[5]:
<keras.callbacks.History at 0x13045ce50>

Evaluating

In [6]:
model.evaluate(X_test, y_test, show_accuracy=True)
10000/10000 [==============================] - 0s     
Out[6]:
[0.32320401608347893, 0.9204]

Accuracy: 0.9204
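
The second value returned by evaluate is the classification accuracy. The same figure can be reproduced by hand from the predicted probabilities; a minimal sketch, assuming the variables above are still in scope:

pred = model.predict(X_test)       # class probabilities, shape (10000, 10)
accuracy = np.mean(np.argmax(pred, axis=1) == np.argmax(y_test, axis=1))
print(accuracy)                    # should be close to 0.9204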