In [1]:
from keras.preprocessing import sequence
from keras.models import Sequential, Model
from keras.layers import Dense, Embedding, Input
from keras.layers import LSTM, GlobalMaxPooling1D, Flatten
from keras.datasets import imdb

import numpy as np

max_features = 20
maxlen = 5  # cut texts after this number of words (among top max_features most common words)
batch_size = 128
epochs = 2

print('Loading data...')
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
print(len(x_train), 'train sequences')
print(len(x_test), 'test sequences')

i = 10
x_train = x_train[:i]
y_train = y_train[:i]
x_test = x_test[:i]
y_test = y_test[:i]

print('Pad sequences (samples x time)')
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)

print('Build model...')
inputs = Input(shape=(maxlen,), dtype='int32')
x = Embedding(max_features, output_dim=3, trainable=False)(inputs)
# x = LSTM(1, dropout=0.2)(x)
x = Flatten()(x)
outputs = Dense(1, activation='sigmoid')(x)

model = Model(inputs=inputs, outputs=outputs)
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
model.summary()

print('Train...')
model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=epochs,
          validation_data=(x_test, y_test))

# print weights
names = [weight.name for layer in model.layers for weight in layer.weights]
weights = model.get_weights()

# suppress scientific notation
np.set_printoptions(suppress=True)
for name, weight in zip(names, weights):
    print(name, weight.shape)
    print(weight)

# obtain the output of an intermediate layer
intermediate_layer_model = Model(inputs=model.input,
                                 outputs=model.get_layer('embedding_1').output)
intermediate_output = intermediate_layer_model.predict(x_test[:1])

print("Input:", x_test[:1])
print("Intermediate output:", intermediate_output)
print('Predict value:', model.predict(x_test[:1]))
Using TensorFlow backend.
Loading data...
25000 train sequences
25000 test sequences
Pad sequences (samples x time)
x_train shape: (10, 5)
x_test shape: (10, 5)
Build model...
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         (None, 5)                 0         
_________________________________________________________________
embedding_1 (Embedding)      (None, 5, 3)              60        
_________________________________________________________________
flatten_1 (Flatten)          (None, 15)                0         
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 16        
=================================================================
Total params: 76
Trainable params: 16
Non-trainable params: 60
_________________________________________________________________
Train...
Train on 10 samples, validate on 10 samples
Epoch 1/2
10/10 [==============================] - 0s 13ms/step - loss: 0.6900 - acc: 0.6000 - val_loss: 0.6933 - val_acc: 0.6000
Epoch 2/2
10/10 [==============================] - 0s 237us/step - loss: 0.6898 - acc: 0.6000 - val_loss: 0.6934 - val_acc: 0.5000
embedding_1/embeddings:0 (20, 3)
[[ 0.03710604  0.0447267   0.01845541]
 [ 0.02056861 -0.00752139  0.04584632]
 [ 0.02605296 -0.03791744 -0.032538  ]
 [ 0.04944486  0.02518579 -0.03178279]
 [ 0.03725732 -0.01928098  0.03238809]
 [ 0.00917146  0.00534006 -0.00904445]
 [ 0.00471218  0.00747512 -0.00115185]
 [-0.03455156  0.02233679  0.02515448]
 [-0.03276616 -0.02064025  0.0246176 ]
 [-0.02794037  0.02399851  0.02930913]
 [ 0.02345468  0.01916173 -0.04229976]
 [-0.03585964  0.03904844 -0.0273299 ]
 [-0.02466786 -0.03819505  0.00927155]
 [-0.03251936  0.03960699 -0.0251536 ]
 [-0.04313273  0.045066   -0.00468619]
 [-0.01276302 -0.04676872  0.00260699]
 [-0.01385712 -0.00873871 -0.02249327]
 [ 0.00373733 -0.04424334 -0.04431904]
 [-0.02187735 -0.01782734  0.03866576]
 [ 0.00147777  0.03964208 -0.01970644]]
dense_1/kernel:0 (15, 1)
[[-0.5859396 ]
 [-0.33103675]
 [-0.6103033 ]
 [-0.1878556 ]
 [ 0.25851333]
 [ 0.60678446]
 [-0.41208446]
 [ 0.14432031]
 [-0.6075742 ]
 [-0.3771886 ]
 [-0.5140718 ]
 [-0.56113195]
 [ 0.39510807]
 [ 0.590024  ]
 [-0.1833529 ]]
dense_1/bias:0 (1,)
[-0.00199986]
Input: [[ 2  2 14  6  2]]
Intermediate output: [[[ 0.02605296 -0.03791744 -0.032538  ]
  [ 0.02605296 -0.03791744 -0.032538  ]
  [-0.04313273  0.045066   -0.00468619]
  [ 0.00471218  0.00747512 -0.00115185]
  [ 0.02605296 -0.03791744 -0.032538  ]]]
Predict value: [[0.49918607]]
In [2]:
intermediate_output
Out[2]:
array([[[ 0.02605296, -0.03791744, -0.032538  ],
        [ 0.02605296, -0.03791744, -0.032538  ],
        [-0.04313273,  0.045066  , -0.00468619],
        [ 0.00471218,  0.00747512, -0.00115185],
        [ 0.02605296, -0.03791744, -0.032538  ]]], dtype=float32)
In [3]:
# obtain the output of an intermediate layer
intermediate_layer_model = Model(inputs=model.input,
                                 outputs=model.get_layer('flatten_1').output)
intermediate_output = intermediate_layer_model.predict(x_test[:1])

intermediate_output, intermediate_output.shape
Out[3]:
(array([[ 0.02605296, -0.03791744, -0.032538  ,  0.02605296, -0.03791744,
         -0.032538  , -0.04313273,  0.045066  , -0.00468619,  0.00471218,
          0.00747512, -0.00115185,  0.02605296, -0.03791744, -0.032538  ]],
       dtype=float32), (1, 15))
In [4]:
# dense_1 layer weights
w = np.array([[-0.5859396 ],
 [-0.33103675],
 [-0.6103033 ],
 [-0.1878556 ],
 [ 0.25851333],
 [ 0.60678446],
 [-0.41208446],
 [ 0.14432031],
 [-0.6075742 ],
 [-0.3771886 ],
 [-0.5140718 ],
 [-0.56113195],
 [ 0.39510807],
 [ 0.590024  ],
 [-0.1833529 ]])
In [5]:
# matrix multiplication
o = np.dot(intermediate_output, w) + -0.00199986
o
Out[5]:
array([[-0.00325595]])
In [6]:
# sigmoid output
import math
1 / (1 + math.exp(-o))
Out[6]:
0.4991860120498202