import numpy as np

# This is our initial data; one entry per "sample"
# (in this toy example, a "sample" is just a sentence, but
# it could be an entire document).
samples = ['The cat sat on the mat.', 'The dog ate my homework.']

# First, build an index of all tokens in the data.
token_index = {}
for sample in samples:
    # We simply tokenize the samples via the `split` method.
    # In real life, we would also strip punctuation and special
    # characters from the samples.
    for word in sample.split():
        if word not in token_index:
            # Assign a unique index to each unique word.
            # Note that we don't attribute index 0 to anything.
            token_index[word] = len(token_index) + 1

# Next, we vectorize our samples.
# We will only consider the first `max_length` words in each sample.
max_length = 10

# This is where we store our results:
# shape (num_samples, max_length, vocabulary_size + 1).
results = np.zeros((len(samples), max_length, max(token_index.values()) + 1))
for i, sample in enumerate(samples):
    # Truncate to `max_length` words; without this slice a sample longer
    # than `max_length` words would overflow the second axis.
    for j, word in enumerate(sample.split()[:max_length]):
        index = token_index[word]
        results[i, j, index] = 1.
results
array([[[ 0., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]], [[ 0., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]]])
from keras.preprocessing.text import Tokenizer
samples = ['The cat sat on the mat.', 'The dog ate my homework.']
# We create a tokenizer, configured to only take
# into account the top 1000 most common words.
tokenizer = Tokenizer(num_words=1000)
# This builds the word index.
tokenizer.fit_on_texts(samples)
# This turns strings into lists of integer indices.
sequences = tokenizer.texts_to_sequences(samples)
# You could also directly get the one-hot binary representations.
# Note that other vectorization modes than one-hot encoding are supported!
one_hot_results = tokenizer.texts_to_matrix(samples, mode='binary')
# This is how you can recover the word index that was computed.
word_index = tokenizer.word_index
print('Found %s unique tokens.' % len(word_index))
Using TensorFlow backend.
Found 9 unique tokens.
one_hot_results
array([[ 0., 1., 1., ..., 0., 0., 0.], [ 0., 1., 0., ..., 0., 0., 0.]])
sequences
[[1, 6, 9, 2, 1, 4], [1, 5, 3, 7, 8]]
from keras.layers import Embedding
# The Embedding layer takes at least two arguments:
# the number of possible tokens, here 1000 (1 + maximum word index),
# and the dimensionality of the embeddings, here 64.
# It maps each integer token index to a dense 64-dimensional vector.
embedding_layer = Embedding(1000, 64)
from keras.datasets import imdb
from keras.preprocessing import sequence
max_features = 10000 # number of words to consider as features
maxlen = 20 # cut texts after this number of words (among top max_features most common words)
# Load the data as lists of integers (each review is a list of word indices).
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
# This turns our lists of integers
# into a 2D integer tensor of shape `(samples, maxlen)`,
# truncating/padding each review to exactly `maxlen` entries.
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
from keras.models import Sequential
from keras.layers import Flatten, Dense
model = Sequential()
# We specify the maximum input length to our Embedding layer
# so we can later flatten the embedded inputs.
model.add(Embedding(10000, 8, input_length=maxlen))
# After the Embedding layer, our activations have shape `(samples, maxlen, 8)`.
# Flatten the 3D tensor of embeddings into a 2D tensor of shape `(samples, maxlen * 8)`.
model.add(Flatten())
# Add the classifier on top: a single sigmoid unit for binary sentiment.
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
model.summary()
history = model.fit(x_train, y_train,
epochs=10,
batch_size=32,
validation_split=0.2)
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= embedding_2 (Embedding) (None, 20, 8) 80000 _________________________________________________________________ flatten_1 (Flatten) (None, 160) 0 _________________________________________________________________ dense_1 (Dense) (None, 1) 161 ================================================================= Total params: 80,161 Trainable params: 80,161 Non-trainable params: 0 _________________________________________________________________ Train on 20000 samples, validate on 5000 samples Epoch 1/10 20000/20000 [==============================] - 2s - loss: 0.6561 - acc: 0.6479 - val_loss: 0.5909 - val_acc: 0.7146 Epoch 2/10 20000/20000 [==============================] - 1s - loss: 0.5193 - acc: 0.7591 - val_loss: 0.5121 - val_acc: 0.7366 Epoch 3/10 20000/20000 [==============================] - 1s - loss: 0.4516 - acc: 0.7929 - val_loss: 0.4952 - val_acc: 0.7466 Epoch 4/10 20000/20000 [==============================] - 1s - loss: 0.4194 - acc: 0.8064 - val_loss: 0.4907 - val_acc: 0.7538 Epoch 5/10 20000/20000 [==============================] - 1s - loss: 0.3968 - acc: 0.8199 - val_loss: 0.4916 - val_acc: 0.7578 Epoch 6/10 20000/20000 [==============================] - 1s - loss: 0.3787 - acc: 0.8314 - val_loss: 0.4954 - val_acc: 0.7586 Epoch 7/10 20000/20000 [==============================] - 1s - loss: 0.3627 - acc: 0.8416 - val_loss: 0.5004 - val_acc: 0.7572 Epoch 8/10 20000/20000 [==============================] - 1s - loss: 0.3477 - acc: 0.8482 - val_loss: 0.5058 - val_acc: 0.7574 Epoch 9/10 20000/20000 [==============================] - 1s - loss: 0.3333 - acc: 0.8582 - val_loss: 0.5122 - val_acc: 0.7524 Epoch 10/10 20000/20000 [==============================] - 1s - loss: 0.3197 - acc: 0.8669 - val_loss: 0.5182 - val_acc: 0.7548
from keras.layers import SimpleRNN
from keras.models import Sequential

max_features = 10000  # number of words to consider as features
maxlen = 500  # cut texts after this number of words (among top max_features most common words)
batch_size = 32

print('Loading data...')
# Use `num_words` — the Keras 2 keyword — instead of the deprecated
# Keras 1 spelling `nb_words`, for consistency with the earlier
# imdb.load_data(num_words=...) call in this file.
(input_train, y_train), (input_test, y_test) = imdb.load_data(num_words=max_features)
print(len(input_train), 'train sequences')
print(len(input_test), 'test sequences')

print('Pad sequences (samples x time)')
# Truncate/pad every review to exactly `maxlen` timesteps.
input_train = sequence.pad_sequences(input_train, maxlen=maxlen)
input_test = sequence.pad_sequences(input_test, maxlen=maxlen)
print('input_train shape:', input_train.shape)
print('input_test shape:', input_test.shape)
Loading data... (25000, 'train sequences') (25000, 'test sequences') Pad sequences (samples x time) ('input_train shape:', (25000, 500)) ('input_test shape:', (25000, 500))
from keras.layers import Dense

# A minimal recurrent classifier: embed word indices, run a SimpleRNN
# over the sequence, and classify the final state with a sigmoid unit.
model = Sequential()
model.add(Embedding(max_features, 32))
model.add(SimpleRNN(32))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
# `epochs` replaces the deprecated Keras 1 keyword `nb_epoch`,
# matching the earlier model.fit call in this file.
history = model.fit(input_train, y_train,
                    epochs=8,
                    batch_size=128,
                    validation_split=0.2)
Train on 20000 samples, validate on 5000 samples Epoch 1/8 20000/20000 [==============================] - 33s - loss: 0.6576 - acc: 0.6001 - val_loss: 0.7177 - val_acc: 0.5362 Epoch 2/8 20000/20000 [==============================] - 36s - loss: 0.4236 - acc: 0.8206 - val_loss: 0.4580 - val_acc: 0.7914 Epoch 3/8 20000/20000 [==============================] - 32s - loss: 0.2923 - acc: 0.8826 - val_loss: 0.3666 - val_acc: 0.8472 Epoch 4/8 20000/20000 [==============================] - 32s - loss: 0.2055 - acc: 0.9227 - val_loss: 0.4738 - val_acc: 0.7870 Epoch 5/8 20000/20000 [==============================] - 32s - loss: 0.1464 - acc: 0.9477 - val_loss: 0.4484 - val_acc: 0.8308 Epoch 6/8 20000/20000 [==============================] - 32s - loss: 0.0942 - acc: 0.9684 - val_loss: 1.7280 - val_acc: 0.5814 Epoch 7/8 20000/20000 [==============================] - 32s - loss: 0.0807 - acc: 0.9742 - val_loss: 0.6480 - val_acc: 0.7598 Epoch 8/8 20000/20000 [==============================] - 33s - loss: 0.0427 - acc: 0.9872 - val_loss: 0.5870 - val_acc: 0.7986
import pandas as pd
import matplotlib.pyplot as plt
import seaborn
%matplotlib inline
history_dict = history.history
# Convert history_dict to a pandas DataFrame.
d = pd.DataFrame(history_dict)
# Plot training and validation loss.
d[['loss', 'val_loss']].plot()
plt.show()
# Plot training and validation accuracy.
d[['acc', 'val_acc']].plot()
plt.show()
from keras.layers import LSTM

# Same architecture as the SimpleRNN classifier above, with the
# recurrent layer swapped for an LSTM.
model = Sequential()
model.add(Embedding(max_features, 32))
model.add(LSTM(32))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
# `epochs` replaces the deprecated Keras 1 keyword `nb_epoch`,
# matching the earlier model.fit call in this file.
history = model.fit(input_train, y_train,
                    epochs=10,
                    batch_size=128,
                    validation_split=0.2)
Train on 20000 samples, validate on 5000 samples Epoch 1/10 20000/20000 [==============================] - 181s - loss: 0.5529 - acc: 0.7366 - val_loss: 0.3917 - val_acc: 0.8424 Epoch 2/10 20000/20000 [==============================] - 178s - loss: 0.3070 - acc: 0.8762 - val_loss: 0.3548 - val_acc: 0.8478 Epoch 3/10 20000/20000 [==============================] - 186s - loss: 0.2414 - acc: 0.9083 - val_loss: 0.3468 - val_acc: 0.8466 Epoch 4/10 20000/20000 [==============================] - 194s - loss: 0.2050 - acc: 0.9249 - val_loss: 0.3181 - val_acc: 0.8852 Epoch 5/10 20000/20000 [==============================] - 196s - loss: 0.1837 - acc: 0.9330 - val_loss: 0.2947 - val_acc: 0.8820 Epoch 6/10 20000/20000 [==============================] - 165s - loss: 0.1626 - acc: 0.9411 - val_loss: 0.3284 - val_acc: 0.8526 Epoch 7/10 20000/20000 [==============================] - 171s - loss: 0.1506 - acc: 0.9479 - val_loss: 0.3142 - val_acc: 0.8808 Epoch 8/10 20000/20000 [==============================] - 176s - loss: 0.1378 - acc: 0.9503 - val_loss: 0.3504 - val_acc: 0.8858 Epoch 9/10 20000/20000 [==============================] - 170s - loss: 0.1269 - acc: 0.9564 - val_loss: 0.3428 - val_acc: 0.8758 Epoch 10/10 20000/20000 [==============================] - 168s - loss: 0.1153 - acc: 0.9602 - val_loss: 0.3337 - val_acc: 0.8708
history_dict = history.history
# Convert history_dict to a pandas DataFrame.
d = pd.DataFrame(history_dict)
# Plot training and validation loss.
d[['loss', 'val_loss']].plot()
plt.show()
# Plot training and validation accuracy.
d[['acc', 'val_acc']].plot()
plt.show()
%%bash
# Download and unpack the Jena climate dataset.
cd Downloads
# -p makes mkdir idempotent so the cell can be re-run without failing.
mkdir -p jena_climate
cd jena_climate
wget https://s3.amazonaws.com/keras-datasets/jena_climate_2009_2016.csv.zip
# -o overwrites existing files instead of prompting on a re-run.
unzip -o jena_climate_2009_2016.csv.zip
Archive: jena_climate_2009_2016.csv.zip inflating: jena_climate_2009_2016.csv
--2017-07-15 07:41:46-- https://s3.amazonaws.com/keras-datasets/jena_climate_2009_2016.csv.zip Resolving s3.amazonaws.com (s3.amazonaws.com)... 52.216.228.83 Connecting to s3.amazonaws.com (s3.amazonaws.com)|52.216.228.83|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 13568290 (13M) [application/zip] Saving to: 'jena_climate_2009_2016.csv.zip' 0K .......... .......... .......... .......... .......... 0% 72.7K 3m1s 50K .......... .......... .......... .......... .......... 0% 244K 1m57s 100K .......... .......... .......... .......... .......... 1% 256K 95s 150K .......... .......... .......... .......... .......... 1% 356K 80s 200K .......... .......... .......... .......... .......... 1% 567K 68s 250K .......... .......... .......... .......... .......... 2% 479K 61s 300K .......... .......... .......... .......... .......... 2% 544K 56s 350K .......... .......... .......... .......... .......... 3% 3.99M 49s 400K .......... .......... .......... .......... .......... 3% 503K 46s 450K .......... .......... .......... .......... .......... 3% 1.79M 42s 500K .......... .......... .......... .......... .......... 4% 527K 40s 550K .......... .......... .......... .......... .......... 4% 1.70M 37s 600K .......... .......... .......... .......... .......... 4% 810K 36s 650K .......... .......... .......... .......... .......... 5% 3.31M 33s 700K .......... .......... .......... .......... .......... 5% 1.43M 31s 750K .......... .......... .......... .......... .......... 6% 684K 30s 800K .......... .......... .......... .......... .......... 6% 3.70M 29s 850K .......... .......... .......... .......... .......... 6% 825K 28s 900K .......... .......... .......... .......... .......... 7% 3.95M 26s 950K .......... .......... .......... .......... .......... 7% 5.17M 25s 1000K .......... .......... .......... .......... .......... 7% 514K 25s 1050K .......... .......... .......... .......... .......... 8% 3.90M 24s 1100K .......... .......... 
.......... .......... .......... 8% 778K 23s 1150K .......... .......... .......... .......... .......... 9% 2.78M 23s 1200K .......... .......... .......... .......... .......... 9% 6.49M 22s 1250K .......... .......... .......... .......... .......... 9% 540K 22s 1300K .......... .......... .......... .......... .......... 10% 5.45M 21s 1350K .......... .......... .......... .......... .......... 10% 741K 21s 1400K .......... .......... .......... .......... .......... 10% 4.52M 20s 1450K .......... .......... .......... .......... .......... 11% 5.88M 19s 1500K .......... .......... .......... .......... .......... 11% 573K 19s 1550K .......... .......... .......... .......... .......... 12% 2.99M 19s 1600K .......... .......... .......... .......... .......... 12% 1.00M 18s 1650K .......... .......... .......... .......... .......... 12% 2.10M 18s 1700K .......... .......... .......... .......... .......... 13% 4.95M 17s 1750K .......... .......... .......... .......... .......... 13% 2.74M 17s 1800K .......... .......... .......... .......... .......... 13% 686K 17s 1850K .......... .......... .......... .......... .......... 14% 3.43M 16s 1900K .......... .......... .......... .......... .......... 14% 5.97M 16s 1950K .......... .......... .......... .......... .......... 15% 515K 16s 2000K .......... .......... .......... .......... .......... 15% 3.23M 16s 2050K .......... .......... .......... .......... .......... 15% 5.26M 15s 2100K .......... .......... .......... .......... .......... 16% 199K 16s 2150K .......... .......... .......... .......... .......... 16% 5.02M 16s 2200K .......... .......... .......... .......... .......... 16% 6.11M 15s 2250K .......... .......... .......... .......... .......... 17% 5.64M 15s 2300K .......... .......... .......... .......... .......... 17% 7.23M 15s 2350K .......... .......... .......... .......... .......... 18% 8.02M 14s 2400K .......... .......... .......... .......... .......... 
18% 8.19M 14s 2450K .......... .......... .......... .......... .......... 18% 7.97M 14s 2500K .......... .......... .......... .......... .......... 19% 7.00M 13s 2550K .......... .......... .......... .......... .......... 19% 8.88M 13s 2600K .......... .......... .......... .......... .......... 19% 9.14M 13s 2650K .......... .......... .......... .......... .......... 20% 778K 13s 2700K .......... .......... .......... .......... .......... 20% 3.02M 12s 2750K .......... .......... .......... .......... .......... 21% 4.15M 12s 2800K .......... .......... .......... .......... .......... 21% 1.35M 12s 2850K .......... .......... .......... .......... .......... 21% 3.38M 12s 2900K .......... .......... .......... .......... .......... 22% 4.83M 12s 2950K .......... .......... .......... .......... .......... 22% 582K 12s 3000K .......... .......... .......... .......... .......... 23% 959K 12s 3050K .......... .......... .......... .......... .......... 23% 4.76M 11s 3100K .......... .......... .......... .......... .......... 23% 1.90M 11s 3150K .......... .......... .......... .......... .......... 24% 4.61M 11s 3200K .......... .......... .......... .......... .......... 24% 5.57M 11s 3250K .......... .......... .......... .......... .......... 24% 4.49M 11s 3300K .......... .......... .......... .......... .......... 25% 724K 11s 3350K .......... .......... .......... .......... .......... 25% 498K 11s 3400K .......... .......... .......... .......... .......... 26% 4.25M 11s 3450K .......... .......... .......... .......... .......... 26% 5.99M 10s 3500K .......... .......... .......... .......... .......... 26% 4.90M 10s 3550K .......... .......... .......... .......... .......... 27% 5.01M 10s 3600K .......... .......... .......... .......... .......... 27% 6.39M 10s 3650K .......... .......... .......... .......... .......... 27% 5.88M 10s 3700K .......... .......... .......... .......... .......... 28% 383K 10s 3750K .......... .......... .......... 
.......... .......... 28% 807K 10s 3800K .......... .......... .......... .......... .......... 29% 3.35M 10s 3850K .......... .......... .......... .......... .......... 29% 3.05M 10s 3900K .......... .......... .......... .......... .......... 29% 4.30M 9s 3950K .......... .......... .......... .......... .......... 30% 4.30M 9s 4000K .......... .......... .......... .......... .......... 30% 4.50M 9s 4050K .......... .......... .......... .......... .......... 30% 821K 9s 4100K .......... .......... .......... .......... .......... 31% 1.08M 9s 4150K .......... .......... .......... .......... .......... 31% 1.49M 9s 4200K .......... .......... .......... .......... .......... 32% 4.14M 9s 4250K .......... .......... .......... .......... .......... 32% 4.18M 9s 4300K .......... .......... .......... .......... .......... 32% 4.65M 9s 4350K .......... .......... .......... .......... .......... 33% 2.62M 8s 4400K .......... .......... .......... .......... .......... 33% 4.71M 8s 4450K .......... .......... .......... .......... .......... 33% 606K 8s 4500K .......... .......... .......... .......... .......... 34% 1.25M 8s 4550K .......... .......... .......... .......... .......... 34% 3.83M 8s 4600K .......... .......... .......... .......... .......... 35% 4.09M 8s 4650K .......... .......... .......... .......... .......... 35% 2.48M 8s 4700K .......... .......... .......... .......... .......... 35% 2.42M 8s 4750K .......... .......... .......... .......... .......... 36% 4.15M 8s 4800K .......... .......... .......... .......... .......... 36% 842K 8s 4850K .......... .......... .......... .......... .......... 36% 1.42M 8s 4900K .......... .......... .......... .......... .......... 37% 2.00M 8s 4950K .......... .......... .......... .......... .......... 37% 3.92M 8s 5000K .......... .......... .......... .......... .......... 38% 853K 7s 5050K .......... .......... .......... .......... .......... 38% 4.85M 7s 5100K .......... .......... .......... 
.......... .......... 38% 5.89M 7s 5150K .......... .......... .......... .......... .......... 39% 7.99M 7s 5200K .......... .......... .......... .......... .......... 39% 831K 7s 5250K .......... .......... .......... .......... .......... 39% 1.75M 7s 5300K .......... .......... .......... .......... .......... 40% 3.36M 7s 5350K .......... .......... .......... .......... .......... 40% 903K 7s 5400K .......... .......... .......... .......... .......... 41% 4.38M 7s 5450K .......... .......... .......... .......... .......... 41% 2.31M 7s 5500K .......... .......... .......... .......... .......... 41% 5.51M 7s 5550K .......... .......... .......... .......... .......... 42% 188K 7s 5600K .......... .......... .......... .......... .......... 42% 3.57M 7s 5650K .......... .......... .......... .......... .......... 43% 6.24M 7s 5700K .......... .......... .......... .......... .......... 43% 6.29M 7s 5750K .......... .......... .......... .......... .......... 43% 7.46M 7s 5800K .......... .......... .......... .......... .......... 44% 8.71M 7s 5850K .......... .......... .......... .......... .......... 44% 9.08M 6s 5900K .......... .......... .......... .......... .......... 44% 8.39M 6s 5950K .......... .......... .......... .......... .......... 45% 10.4M 6s 6000K .......... .......... .......... .......... .......... 45% 12.7M 6s 6050K .......... .......... .......... .......... .......... 46% 1.04M 6s 6100K .......... .......... .......... .......... .......... 46% 3.94M 6s 6150K .......... .......... .......... .......... .......... 46% 5.93M 6s 6200K .......... .......... .......... .......... .......... 47% 885K 6s 6250K .......... .......... .......... .......... .......... 47% 2.03M 6s 6300K .......... .......... .......... .......... .......... 47% 3.53M 6s 6350K .......... .......... .......... .......... .......... 48% 5.56M 6s 6400K .......... .......... .......... .......... .......... 48% 614K 6s 6450K .......... .......... .......... 
.......... .......... 49% 3.32M 6s 6500K .......... .......... .......... .......... .......... 49% 2.74M 6s 6550K .......... .......... .......... .......... .......... 49% 687K 6s 6600K .......... .......... .......... .......... .......... 50% 4.16M 5s 6650K .......... .......... .......... .......... .......... 50% 253K 6s 6700K .......... .......... .......... .......... .......... 50% 3.26M 6s 6750K .......... .......... .......... .......... .......... 51% 8.01M 5s 6800K .......... .......... .......... .......... .......... 51% 9.79M 5s 6850K .......... .......... .......... .......... .......... 52% 10.2M 5s 6900K .......... .......... .......... .......... .......... 52% 9.01M 5s 6950K .......... .......... .......... .......... .......... 52% 9.25M 5s 7000K .......... .......... .......... .......... .......... 53% 10.0M 5s 7050K .......... .......... .......... .......... .......... 53% 816K 5s 7100K .......... .......... .......... .......... .......... 53% 5.10M 5s 7150K .......... .......... .......... .......... .......... 54% 787K 5s 7200K .......... .......... .......... .......... .......... 54% 2.97M 5s 7250K .......... .......... .......... .......... .......... 55% 6.25M 5s 7300K .......... .......... .......... .......... .......... 55% 564K 5s 7350K .......... .......... .......... .......... .......... 55% 3.67M 5s 7400K .......... .......... .......... .......... .......... 56% 935K 5s 7450K .......... .......... .......... .......... .......... 56% 1.14M 5s 7500K .......... .......... .......... .......... .......... 56% 5.06M 5s 7550K .......... .......... .......... .......... .......... 57% 778K 5s 7600K .......... .......... .......... .......... .......... 57% 2.34M 5s 7650K .......... .......... .......... .......... .......... 58% 5.77M 4s 7700K .......... .......... .......... .......... .......... 58% 646K 4s 7750K .......... .......... .......... .......... .......... 58% 2.11M 4s 7800K .......... .......... .......... 
.......... .......... 59% 4.75M 4s 7850K .......... .......... .......... .......... .......... 59% 753K 4s 7900K .......... .......... .......... .......... .......... 59% 2.21M 4s 7950K .......... .......... .......... .......... .......... 60% 4.93M 4s 8000K .......... .......... .......... .......... .......... 60% 625K 4s 8050K .......... .......... .......... .......... .......... 61% 3.94M 4s 8100K .......... .......... .......... .......... .......... 61% 843K 4s 8150K .......... .......... .......... .......... .......... 61% 1.98M 4s 8200K .......... .......... .......... .......... .......... 62% 3.94M 4s 8250K .......... .......... .......... .......... .......... 62% 658K 4s 8300K .......... .......... .......... .......... .......... 63% 3.53M 4s 8350K .......... .......... .......... .......... .......... 63% 5.31M 4s 8400K .......... .......... .......... .......... .......... 63% 696K 4s 8450K .......... .......... .......... .......... .......... 64% 2.79M 4s 8500K .......... .......... .......... .......... .......... 64% 5.85M 4s 8550K .......... .......... .......... .......... .......... 64% 680K 4s 8600K .......... .......... .......... .......... .......... 65% 3.28M 4s 8650K .......... .......... .......... .......... .......... 65% 4.49M 4s 8700K .......... .......... .......... .......... .......... 66% 533K 4s 8750K .......... .......... .......... .......... .......... 66% 3.76M 4s 8800K .......... .......... .......... .......... .......... 66% 6.96M 4s 8850K .......... .......... .......... .......... .......... 67% 870K 3s 8900K .......... .......... .......... .......... .......... 67% 4.57M 3s 8950K .......... .......... .......... .......... .......... 67% 541K 3s 9000K .......... .......... .......... .......... .......... 68% 3.55M 3s 9050K .......... .......... .......... .......... .......... 68% 4.48M 3s 9100K .......... .......... .......... .......... .......... 69% 987K 3s 9150K .......... .......... .......... .......... 
.......... 69% 1.12M 3s 9200K .......... .......... .......... .......... .......... 69% 3.72M 3s 9250K .......... .......... .......... .......... .......... 70% 786K 3s 9300K .......... .......... .......... .......... .......... 70% 4.29M 3s 9350K .......... .......... .......... .......... .......... 70% 5.41M 3s 9400K .......... .......... .......... .......... .......... 71% 910K 3s 9450K .......... .......... .......... .......... .......... 71% 1.13M 3s 9500K .......... .......... .......... .......... .......... 72% 3.98M 3s 9550K .......... .......... .......... .......... .......... 72% 574K 3s 9600K .......... .......... .......... .......... .......... 72% 3.54M 3s 9650K .......... .......... .......... .......... .......... 73% 5.61M 3s 9700K .......... .......... .......... .......... .......... 73% 1.10M 3s 9750K .......... .......... .......... .......... .......... 73% 1.70M 3s 9800K .......... .......... .......... .......... .......... 74% 545K 3s 9850K .......... .......... .......... .......... .......... 74% 3.36M 3s 9900K .......... .......... .......... .......... .......... 75% 3.89M 3s 9950K .......... .......... .......... .......... .......... 75% 2.14M 3s 10000K .......... .......... .......... .......... .......... 75% 941K 3s 10050K .......... .......... .......... .......... .......... 76% 3.45M 2s 10100K .......... .......... .......... .......... .......... 76% 588K 2s 10150K .......... .......... .......... .......... .......... 76% 3.07M 2s 10200K .......... .......... .......... .......... .......... 77% 221K 2s 10250K .......... .......... .......... .......... .......... 77% 8.39M 2s 10300K .......... .......... .......... .......... .......... 78% 5.85M 2s 10350K .......... .......... .......... .......... .......... 78% 7.67M 2s 10400K .......... .......... .......... .......... .......... 78% 7.28M 2s 10450K .......... .......... .......... .......... .......... 79% 7.59M 2s 10500K .......... .......... .......... 
.......... .......... 79% 6.60M 2s 10550K .......... .......... .......... .......... .......... 79% 6.30M 2s 10600K .......... .......... .......... .......... .......... 80% 474K 2s 10650K .......... .......... .......... .......... .......... 80% 2.55M 2s 10700K .......... .......... .......... .......... .......... 81% 4.75M 2s 10750K .......... .......... .......... .......... .......... 81% 4.91M 2s 10800K .......... .......... .......... .......... .......... 81% 748K 2s 10850K .......... .......... .......... .......... .......... 82% 499K 2s 10900K .......... .......... .......... .......... .......... 82% 2.54M 2s 10950K .......... .......... .......... .......... .......... 83% 5.11M 2s 11000K .......... .......... .......... .......... .......... 83% 5.30M 2s 11050K .......... .......... .......... .......... .......... 83% 993K 2s 11100K .......... .......... .......... .......... .......... 84% 441K 2s 11150K .......... .......... .......... .......... .......... 84% 4.11M 2s 11200K .......... .......... .......... .......... .......... 84% 5.11M 2s 11250K .......... .......... .......... .......... .......... 85% 5.01M 2s 11300K .......... .......... .......... .......... .......... 85% 938K 1s 11350K .......... .......... .......... .......... .......... 86% 465K 1s 11400K .......... .......... .......... .......... .......... 86% 3.83M 1s 11450K .......... .......... .......... .......... .......... 86% 4.46M 1s 11500K .......... .......... .......... .......... .......... 87% 4.27M 1s 11550K .......... .......... .......... .......... .......... 87% 659K 1s 11600K .......... .......... .......... .......... .......... 87% 614K 1s 11650K .......... .......... .......... .......... .......... 88% 2.89M 1s 11700K .......... .......... .......... .......... .......... 88% 4.38M 1s 11750K .......... .......... .......... .......... .......... 89% 1.00M 1s 11800K .......... .......... .......... .......... .......... 89% 1.40M 1s 11850K .......... 
.......... .......... .......... .......... 89% 488K 1s 11900K .......... .......... .......... .......... .......... 90% 3.85M 1s 11950K .......... .......... .......... .......... .......... 90% 5.46M 1s 12000K .......... .......... .......... .......... .......... 90% 6.86M 1s 12050K .......... .......... .......... .......... .......... 91% 797K 1s 12100K .......... .......... .......... .......... .......... 91% 483K 1s 12150K .......... .......... .......... .......... .......... 92% 4.03M 1s 12200K .......... .......... .......... .......... .......... 92% 3.20M 1s 12250K .......... .......... .......... .......... .......... 92% 4.15M 1s 12300K .......... .......... .......... .......... .......... 93% 764K 1s 12350K .......... .......... .......... .......... .......... 93% 334K 1s 12400K .......... .......... .......... .......... .......... 93% 3.00M 1s 12450K .......... .......... .......... .......... .......... 94% 236K 1s 12500K .......... .......... .......... .......... .......... 94% 2.64M 1s 12550K .......... .......... .......... .......... .......... 95% 7.10M 1s 12600K .......... .......... .......... .......... .......... 95% 7.03M 0s 12650K .......... .......... .......... .......... .......... 95% 9.04M 0s 12700K .......... .......... .......... .......... .......... 96% 10.3M 0s 12750K .......... .......... .......... .......... .......... 96% 538K 0s 12800K .......... .......... .......... .......... .......... 96% 759K 0s 12850K .......... .......... .......... .......... .......... 97% 1.67M 0s 12900K .......... .......... .......... .......... .......... 97% 4.74M 0s 12950K .......... .......... .......... .......... .......... 98% 606K 0s 13000K .......... .......... .......... .......... .......... 98% 741K 0s 13050K .......... .......... .......... .......... .......... 98% 1.42M 0s 13100K .......... .......... .......... .......... .......... 99% 5.71M 0s 13150K .......... .......... .......... .......... .......... 
99% 547K 0s 13200K .......... .......... .......... .......... .......... 99% 1.97M 0s 13250K 100% 540G=11s 2017-07-15 07:41:58 (1.22 MB/s) - 'jena_climate_2009_2016.csv.zip' saved [13568290/13568290]
import os

# Path to the Jena climate CSV unpacked by the download cell.
data_dir = 'Downloads/jena_climate'
fname = os.path.join(data_dir, 'jena_climate_2009_2016.csv')

# A context manager guarantees the file handle is closed even if
# reading raises, unlike the open()/read()/close() sequence.
with open(fname) as f:
    data = f.read()

lines = data.split('\n')
header = lines[0].split(',')  # first row holds the column names
lines = lines[1:]             # remaining rows are the data records
print(header)
print(len(lines))
['"Date Time"', '"p (mbar)"', '"T (degC)"', '"Tpot (K)"', '"Tdew (degC)"', '"rh (%)"', '"VPmax (mbar)"', '"VPact (mbar)"', '"VPdef (mbar)"', '"sh (g/kg)"', '"H2OC (mmol/mol)"', '"rho (g/m**3)"', '"wv (m/s)"', '"max. wv (m/s)"', '"wd (deg)"'] 420551
import numpy as np

# Parse every CSV record into a float matrix, dropping the first field
# (the timestamp string) of each row.
# Filter out empty strings first: if the file ends with a newline,
# `data.split('\n')` leaves a blank trailing entry that would make
# float('') raise ValueError in the original loop.
records = [line for line in lines if line]
float_data = np.zeros((len(records), len(header) - 1))
for i, line in enumerate(records):
    values = [float(x) for x in line.split(',')[1:]]
    float_data[i, :] = values
import pandas as pd
import matplotlib.pyplot as plt
import seaborn
%matplotlib inline
# Column 1 of float_data holds the temperature; column 0 of the CSV
# (the timestamp) was dropped during parsing.
temp = float_data[:, 1] # temperature (in degrees Celsius)
# Plot the entire temperature timeseries.
plt.plot(range(len(temp)), temp)
[<matplotlib.lines.Line2D at 0x7f78d4d385d0>]
# Zoom in on the first 10 days: 144 readings per day * 10 days = 1440 points.
plt.plot(range(1440), temp[:1440])
[<matplotlib.lines.Line2D at 0x7f78d3578cd0>]
Normalizing the data
# Normalize using statistics from the first 200,000 timesteps only (the
# training split), so no validation/test information leaks into the scaling.
mean = float_data[:200000].mean(axis=0)
float_data -= mean
# NOTE: std is computed after centering; std is shift-invariant, so this
# equals the std of the raw training data. `mean` and `std` are reused
# later to convert errors back to physical units.
std = float_data[:200000].std(axis=0)
float_data /= std
def generator(data, lookback, delay, min_index, max_index,
              shuffle=False, batch_size=128, step=6, target_col=1):
    """Yield batches of (samples, targets) for timeseries forecasting.

    # Arguments
        data: 2D float array, one row per timestep (normalized features).
        lookback: how many timesteps back each input window extends.
        delay: how many timesteps in the future the target lies.
        min_index, max_index: bounds in `data` delimiting which timesteps
            to draw from; `max_index=None` means "up to the last usable row".
        shuffle: if True, draw windows in random order; otherwise draw them
            chronologically, wrapping around when the range is exhausted.
        batch_size: number of samples per batch.
        step: sampling period, in timesteps, within each window.
        target_col: column of `data` used as the prediction target
            (default 1, the temperature column — backward compatible).

    # Yields
        samples: array of shape (batch, lookback // step, data.shape[-1])
        targets: array of shape (batch,)
    """
    if max_index is None:
        # Leave room for the target `delay` steps in the future.
        max_index = len(data) - delay - 1
    i = min_index + lookback
    while True:
        if shuffle:
            rows = np.random.randint(
                min_index + lookback, max_index, size=batch_size)
        else:
            if i + batch_size >= max_index:
                i = min_index + lookback  # wrap around to the start
            rows = np.arange(i, min(i + batch_size, max_index))
            i += len(rows)

        samples = np.zeros((len(rows), lookback // step, data.shape[-1]))
        targets = np.zeros((len(rows),))
        for j, row in enumerate(rows):
            indices = range(row - lookback, row, step)
            samples[j] = data[indices]
            targets[j] = data[row + delay][target_col]
        yield samples, targets
The data consists of temperature measurements taken at 10-minute intervals.
# Window/target configuration (one timestep = 10 minutes):
lookback = 1440  # input window covers the previous 10 days
step = 6  # sample one observation per hour within the window
delay = 144  # target is the temperature 24 hours in the future
batch_size = 128
# Chronological splits: train [0, 200000), val (200001, 300000), test (300001, end).
train_gen = generator(float_data, lookback=lookback, delay=delay,
                      min_index=0, max_index=200000, shuffle=True, step=step,
                      batch_size=batch_size)
val_gen = generator(float_data, lookback=lookback, delay=delay,
                    min_index=200001, max_index=300000, step=step, batch_size=batch_size)
test_gen = generator(float_data, lookback=lookback, delay=delay,
                     min_index=300001, max_index=None, step=step, batch_size=batch_size)
# This is how many steps to draw from `val_gen` in order to see the whole validation set:
val_steps = (300000 - 200001 - lookback) // batch_size
# This is how many steps to draw from `test_gen` in order to see the whole test set:
test_steps = (len(float_data) - 300001 - lookback) // batch_size
def evaluate_naive_method():
    """Naive baseline: predict that the temperature in 24 h equals the
    last observed temperature, and report the mean MAE over the
    validation set (in normalized units).

    Reads the module-level `val_gen` and `val_steps`. Prints the mean MAE
    and also returns it (backward compatible: the original returned None,
    which no caller used).
    """
    batch_maes = []
    # Loop variable renamed from `step` so it no longer shadows the
    # module-level sampling-period constant of the same name.
    for _ in range(val_steps):
        samples, targets = next(val_gen)
        # Column 1 of the last timestep is the current normalized temperature.
        preds = samples[:, -1, 1]
        batch_maes.append(np.mean(np.abs(preds - targets)))
    mae = np.mean(batch_maes)
    print(mae)
    return mae

evaluate_naive_method()
0.289735972991
# Convert the baseline MAE (~0.29 in normalized units) back to degrees
# Celsius by multiplying by the temperature column's training std.
celsius_mae = 0.29 * std[1]
celsius_mae
2.5672247338393395
Let's see how well a simple (densely connected) neural-network model can predict.
Note: the results below differ substantially from Figure 6.18 in the book.
from keras.models import Sequential
from keras import layers
from keras.optimizers import RMSprop
# Densely connected baseline: flatten the (lookback // step, features)
# window and regress the future temperature with a small MLP.
model = Sequential()
model.add(layers.Flatten(input_shape=(lookback // step, float_data.shape[-1])))
model.add(layers.Dense(32, activation='relu'))
model.add(layers.Dense(1))  # linear output: regression target
model.compile(optimizer=RMSprop(), loss='mae')
history = model.fit_generator(train_gen,
                              steps_per_epoch=500,
                              epochs=20,
                              validation_data=val_gen,
                              validation_steps=val_steps)
Epoch 1/20 500/500 [==============================] - 13s - loss: 1.8228 - val_loss: 1.1467 Epoch 2/20 500/500 [==============================] - 13s - loss: 1.4039 - val_loss: 2.0780 Epoch 3/20 500/500 [==============================] - 13s - loss: 3.1643 - val_loss: 4.6592 Epoch 4/20 500/500 [==============================] - 12s - loss: 6.5291 - val_loss: 5.9072 Epoch 5/20 500/500 [==============================] - 12s - loss: 10.6773 - val_loss: 15.8527 Epoch 6/20 500/500 [==============================] - 11s - loss: 15.3495 - val_loss: 18.8217 Epoch 7/20 500/500 [==============================] - 11s - loss: 20.0555 - val_loss: 29.2959 Epoch 8/20 500/500 [==============================] - 12s - loss: 24.8096 - val_loss: 19.2993 Epoch 9/20 500/500 [==============================] - 12s - loss: 29.8820 - val_loss: 26.0697 Epoch 10/20 500/500 [==============================] - 12s - loss: 34.9356 - val_loss: 30.9325 Epoch 11/20 500/500 [==============================] - 12s - loss: 39.6692 - val_loss: 54.1092 Epoch 12/20 500/500 [==============================] - 12s - loss: 44.7078 - val_loss: 53.9539 Epoch 13/20 500/500 [==============================] - 11s - loss: 49.5438 - val_loss: 29.9209 Epoch 14/20 500/500 [==============================] - 11s - loss: 54.2989 - val_loss: 86.8137 Epoch 15/20 500/500 [==============================] - 12s - loss: 59.0245 - val_loss: 73.1270 Epoch 16/20 500/500 [==============================] - 12s - loss: 63.8793 - val_loss: 96.4103 Epoch 17/20 500/500 [==============================] - 11s - loss: 68.6370 - val_loss: 6.3241 Epoch 18/20 500/500 [==============================] - 12s - loss: 74.3662 - val_loss: 51.7965 Epoch 19/20 500/500 [==============================] - 13s - loss: 78.8554 - val_loss: 102.2837 Epoch 20/20 500/500 [==============================] - 13s - loss: 84.1538 - val_loss: 45.6256
def plot_history(history):
    """Plot training vs. validation loss curves from a Keras History.

    # Arguments
        history: object whose `.history` dict contains per-epoch
            'loss' and 'val_loss' lists (as returned by `model.fit*`).
    """
    loss = history.history['loss']
    val_loss = history.history['val_loss']
    # Renamed from `dict` — the original shadowed the builtin `dict` type;
    # the unused `epochs` local was dropped.
    curves = {'loss': loss, 'val_loss': val_loss}
    d = pd.DataFrame(curves)
    d.plot()
    plt.title('Training and validation loss')
    plt.show()

plot_history(history)
Solving the problem with a recurrent model
# Recurrent baseline: a single GRU layer reads the window sequentially.
model = Sequential()
# input_shape=(None, features): the timestep count is left unspecified so
# the model accepts sequences of any length.
model.add(layers.GRU(32, input_shape=(None, float_data.shape[-1])))
model.add(layers.Dense(1))
model.compile(optimizer=RMSprop(), loss='mae')
history = model.fit_generator(train_gen,
                              steps_per_epoch=500,
                              epochs=20,
                              validation_data=val_gen,
                              validation_steps=val_steps)
Epoch 1/20 500/500 [==============================] - 267s - loss: 0.3043 - val_loss: 0.2725 Epoch 2/20 500/500 [==============================] - 272s - loss: 0.2850 - val_loss: 0.2770 Epoch 3/20 500/500 [==============================] - 286s - loss: 0.2793 - val_loss: 0.2664 Epoch 4/20 500/500 [==============================] - 267s - loss: 0.2741 - val_loss: 0.2643 Epoch 5/20 500/500 [==============================] - 263s - loss: 0.2674 - val_loss: 0.2684 Epoch 6/20 500/500 [==============================] - 263s - loss: 0.2642 - val_loss: 0.2695 Epoch 7/20 500/500 [==============================] - 264s - loss: 0.2587 - val_loss: 0.2708 Epoch 8/20 500/500 [==============================] - 263s - loss: 0.2551 - val_loss: 0.2699 Epoch 9/20 500/500 [==============================] - 261s - loss: 0.2495 - val_loss: 0.2718 Epoch 10/20 500/500 [==============================] - 262s - loss: 0.2453 - val_loss: 0.2732 Epoch 11/20 500/500 [==============================] - 257s - loss: 0.2415 - val_loss: 0.2772 Epoch 12/20 500/500 [==============================] - 256s - loss: 0.2370 - val_loss: 0.2790 Epoch 13/20 500/500 [==============================] - 256s - loss: 0.2327 - val_loss: 0.2806 Epoch 14/20 500/500 [==============================] - 256s - loss: 0.2298 - val_loss: 0.2820 Epoch 15/20 500/500 [==============================] - 259s - loss: 0.2263 - val_loss: 0.2837 Epoch 16/20 500/500 [==============================] - 283s - loss: 0.2234 - val_loss: 0.2903 Epoch 17/20 500/500 [==============================] - 278s - loss: 0.2200 - val_loss: 0.2876 Epoch 18/20 500/500 [==============================] - 283s - loss: 0.2168 - val_loss: 0.2964 Epoch 19/20 500/500 [==============================] - 273s - loss: 0.2129 - val_loss: 0.2899 Epoch 20/20 500/500 [==============================] - 263s - loss: 0.2095 - val_loss: 0.3016
As a sign of overfitting, the training loss keeps decreasing, while the validation loss reaches its minimum within the first few epochs and then gradually increases.
plot_history(history)
# epochs was set to 20
# GRU with dropout regularization: `dropout` masks the layer inputs,
# `recurrent_dropout` masks the recurrent state.
model = Sequential()
model.add(layers.GRU(32,
                     dropout=0.2,
                     recurrent_dropout=0.2,
                     input_shape=(None, float_data.shape[-1])))
model.add(layers.Dense(1))
model.compile(optimizer=RMSprop(), loss='mae')
history_dp = model.fit_generator(train_gen,
                                 steps_per_epoch=500,
                                 epochs=20,
                                 validation_data=val_gen,
                                 validation_steps=val_steps)
Epoch 1/20 500/500 [==============================] - 285s - loss: 0.3406 - val_loss: 0.2766 Epoch 2/20 500/500 [==============================] - 291s - loss: 0.3130 - val_loss: 0.2770 Epoch 3/20 500/500 [==============================] - 289s - loss: 0.3068 - val_loss: 0.2689 Epoch 4/20 500/500 [==============================] - 297s - loss: 0.3054 - val_loss: 0.2718 Epoch 5/20 500/500 [==============================] - 286s - loss: 0.3010 - val_loss: 0.2674 Epoch 6/20 500/500 [==============================] - 305s - loss: 0.2957 - val_loss: 0.2661 Epoch 7/20 500/500 [==============================] - 284s - loss: 0.2933 - val_loss: 0.2697 Epoch 8/20 500/500 [==============================] - 282s - loss: 0.2934 - val_loss: 0.2651 Epoch 9/20 500/500 [==============================] - 287s - loss: 0.2912 - val_loss: 0.2669 Epoch 10/20 500/500 [==============================] - 289s - loss: 0.2904 - val_loss: 0.2682 Epoch 11/20 500/500 [==============================] - 290s - loss: 0.2872 - val_loss: 0.2672 Epoch 12/20 500/500 [==============================] - 308s - loss: 0.2863 - val_loss: 0.2628 Epoch 13/20 500/500 [==============================] - 300s - loss: 0.2853 - val_loss: 0.2640 Epoch 14/20 500/500 [==============================] - 288s - loss: 0.2841 - val_loss: 0.2658 Epoch 15/20 500/500 [==============================] - 282s - loss: 0.2835 - val_loss: 0.2634 Epoch 16/20 500/500 [==============================] - 292s - loss: 0.2825 - val_loss: 0.2655 Epoch 17/20 500/500 [==============================] - 283s - loss: 0.2813 - val_loss: 0.2650 Epoch 18/20 500/500 [==============================] - 297s - loss: 0.2804 - val_loss: 0.2635 Epoch 19/20 500/500 [==============================] - 298s - loss: 0.2800 - val_loss: 0.2658 Epoch 20/20 500/500 [==============================] - 296s - loss: 0.2788 - val_loss: 0.2658
# Loss curves for the dropout-regularized GRU.
plot_history(history_dp)
When stacking recurrent layers in Keras, every intermediate layer must return its full sequence of outputs (not just the last timestep); this is done by specifying the return_sequences=True option.
# epochs was set to 20
# dropout=0.2, recurrent_dropout=0.2 to match the single-layer run for comparison
model = Sequential()
model.add(layers.GRU(32,
                     dropout=0.2,
                     recurrent_dropout=0.2,
                     # required so the next GRU layer receives the full sequence
                     return_sequences=True,
                     input_shape=(None, float_data.shape[-1])))
model.add(layers.GRU(64, activation='relu',
                     dropout=0.2,
                     recurrent_dropout=0.2))
model.add(layers.Dense(1))
model.compile(optimizer=RMSprop(), loss='mae')
history_stacked_dp = model.fit_generator(train_gen,
                                         steps_per_epoch=500,
                                         epochs=20,
                                         validation_data=val_gen,
                                         validation_steps=val_steps)
Epoch 1/20 500/500 [==============================] - 874s - loss: 0.3460 - val_loss: 0.2941 Epoch 2/20 500/500 [==============================] - 837s - loss: 0.3232 - val_loss: 0.2780 Epoch 3/20 500/500 [==============================] - 2138s - loss: 0.3144 - val_loss: 0.2765 Epoch 4/20 500/500 [==============================] - 847s - loss: 0.3107 - val_loss: 0.2841 Epoch 5/20 500/500 [==============================] - 815s - loss: 0.3040 - val_loss: 0.2700 Epoch 6/20 500/500 [==============================] - 809s - loss: 0.3001 - val_loss: 0.2799 Epoch 7/20 500/500 [==============================] - 813s - loss: 0.2958 - val_loss: 0.2796 Epoch 8/20 500/500 [==============================] - 819s - loss: 0.2918 - val_loss: 0.2779 Epoch 9/20 500/500 [==============================] - 811s - loss: 0.2906 - val_loss: 0.2711 Epoch 10/20 500/500 [==============================] - 910s - loss: 0.2867 - val_loss: 0.2899 Epoch 11/20 500/500 [==============================] - 900s - loss: 0.2853 - val_loss: 0.2808 Epoch 12/20 500/500 [==============================] - 937s - loss: 0.2822 - val_loss: 0.2901 Epoch 13/20 500/500 [==============================] - 877s - loss: 0.2827 - val_loss: 0.2857 Epoch 14/20 500/500 [==============================] - 857s - loss: 0.2806 - val_loss: 0.2702 Epoch 15/20 500/500 [==============================] - 817s - loss: 0.2794 - val_loss: 0.2952 Epoch 16/20 500/500 [==============================] - 907s - loss: 0.2747 - val_loss: 0.2737 Epoch 17/20 500/500 [==============================] - 889s - loss: 0.2734 - val_loss: 0.2696 Epoch 18/20 500/500 [==============================] - 873s - loss: 0.2725 - val_loss: 0.2680 Epoch 19/20 500/500 [==============================] - 852s - loss: 0.2720 - val_loss: 0.2937 Epoch 20/20 500/500 [==============================] - 846s - loss: 0.2708 - val_loss: 0.2745
# BUG FIX: this cell follows the stacked-GRU run but re-plotted the
# single-layer history (`history_dp`); plot the stacked model's history
# (`history_stacked_dp`, assigned by the previous fit) instead.
plot_history(history_stacked_dp)
The following code produced an error!
# FIX for the ValueError in the traceback below: when a `Bidirectional`
# wrapper is the first layer of a Sequential model, `input_shape` must be
# passed to the wrapper itself — the shape given to the inner GRU is not
# propagated, so Sequential cannot build an input layer.
model = Sequential()
model.add(layers.Bidirectional(
    layers.GRU(32), input_shape=(None, float_data.shape[-1])))
model.add(layers.Dense(1))
model.compile(optimizer=RMSprop(), loss='mae')
# NOTE(review): `train_gen_rev` / `val_gen_rev` (presumably reversed-order
# generators) are not defined anywhere in this file — they must be created
# before this cell can run; verify against the intended experiment.
history_dp = model.fit_generator(train_gen_rev,
                                 steps_per_epoch=500,
                                 epochs=20,
                                 validation_data=val_gen_rev,
                                 validation_steps=val_steps)
ValueErrorTraceback (most recent call last) <ipython-input-56-be7c7319eff1> in <module>() 1 model = Sequential() 2 model.add(layers.Bidirectional( ----> 3 layers.GRU(32, input_shape=(None, float_data.shape[-1])))) 4 model.add(layers.Dense(1)) 5 model.compile(optimizer=RMSprop(), loss='mae') /usr/lib/sagemath/local/lib/python2.7/site-packages/keras/models.pyc in add(self, layer) 421 # create an input layer 422 if not hasattr(layer, 'batch_input_shape'): --> 423 raise ValueError('The first layer in a ' 424 'Sequential model must ' 425 'get an `input_shape` or ' ValueError: The first layer in a Sequential model must get an `input_shape` or `batch_input_shape` argument.
# We reuse the `generator` function defined at the previous section.
# This was previously set to 6 (one point per hour).
# Now 3 (one point per 30 min).
step = 3
# NOTE(review): lookback was 1440 in the earlier section, so the
# "Unchanged" comment below looks inaccurate — confirm against the book.
lookback = 720  # Unchanged
delay = 144  # Unchanged
train_gen = generator(float_data, lookback=lookback, delay=delay,
                      min_index=0, max_index=200000, shuffle=True, step=step)
val_gen = generator(float_data, lookback=lookback, delay=delay,
                    min_index=200001, max_index=300000, step=step)
test_gen = generator(float_data, lookback=lookback, delay=delay,
                     min_index=300001, max_index=None, step=step)
# 128 below is the generator's default batch_size.
val_steps = (300000 - 200001 - lookback) // 128
test_steps = (len(float_data) - 300001 - lookback) // 128
from keras.models import Sequential
from keras import layers
from keras.optimizers import RMSprop
# Hybrid model: 1D convolutions + pooling downsample the long sequence
# cheaply, then a GRU models temporal order over the shortened sequence.
model = Sequential()
model.add(layers.Conv1D(32, 5, activation='relu',
                        input_shape=(None, float_data.shape[-1])))
model.add(layers.MaxPooling1D(3))
model.add(layers.Conv1D(32, 5, activation='relu'))
model.add(layers.GRU(32, dropout=0.1, recurrent_dropout=0.5))
model.add(layers.Dense(1))
model.summary()
model.compile(optimizer=RMSprop(), loss='mae')
history = model.fit_generator(train_gen,
                              steps_per_epoch=500,
                              epochs=20,
                              validation_data=val_gen,
                              validation_steps=val_steps)
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv1d_3 (Conv1D) (None, None, 32) 2272 _________________________________________________________________ max_pooling1d_2 (MaxPooling1 (None, None, 32) 0 _________________________________________________________________ conv1d_4 (Conv1D) (None, None, 32) 5152 _________________________________________________________________ gru_2 (GRU) (None, 32) 6240 _________________________________________________________________ dense_2 (Dense) (None, 1) 33 ================================================================= Total params: 13,697 Trainable params: 13,697 Non-trainable params: 0 _________________________________________________________________ Epoch 1/20 500/500 [==============================] - 165s - loss: 0.3441 - val_loss: 0.2901 Epoch 2/20 500/500 [==============================] - 162s - loss: 0.3092 - val_loss: 0.2984 Epoch 3/20 500/500 [==============================] - 165s - loss: 0.3013 - val_loss: 0.2815 Epoch 4/20 500/500 [==============================] - 180s - loss: 0.2907 - val_loss: 0.2725 Epoch 5/20 500/500 [==============================] - 167s - loss: 0.2867 - val_loss: 0.2717 Epoch 6/20 500/500 [==============================] - 166s - loss: 0.2793 - val_loss: 0.2817 Epoch 7/20 500/500 [==============================] - 162s - loss: 0.2746 - val_loss: 0.2805 Epoch 8/20 500/500 [==============================] - 167s - loss: 0.2709 - val_loss: 0.2781 Epoch 9/20 500/500 [==============================] - 174s - loss: 0.2651 - val_loss: 0.2800 Epoch 10/20 500/500 [==============================] - 173s - loss: 0.2597 - val_loss: 0.2888 Epoch 11/20 500/500 [==============================] - 167s - loss: 0.2560 - val_loss: 0.2859 Epoch 12/20 500/500 [==============================] - 153s - loss: 0.2513 - val_loss: 0.2909 Epoch 13/20 500/500 [==============================] - 166s - 
loss: 0.2484 - val_loss: 0.2950 Epoch 14/20 500/500 [==============================] - 171s - loss: 0.2455 - val_loss: 0.2897 Epoch 15/20 500/500 [==============================] - 175s - loss: 0.2416 - val_loss: 0.2913 Epoch 16/20 500/500 [==============================] - 167s - loss: 0.2372 - val_loss: 0.2945 Epoch 17/20 500/500 [==============================] - 163s - loss: 0.2358 - val_loss: 0.3052 Epoch 18/20 500/500 [==============================] - 166s - loss: 0.2336 - val_loss: 0.3068 Epoch 19/20 500/500 [==============================] - 175s - loss: 0.2308 - val_loss: 0.3063 Epoch 20/20 500/500 [==============================] - 176s - loss: 0.2285 - val_loss: 0.3097
# Loss curves for the Conv1D + GRU hybrid.
plot_history(history)