In [ ]:
# Mount Google Drive and switch to the project directory
from google.colab import drive
drive.mount('/content/gdrive')
import os
os.chdir('/content/gdrive/My Drive/finch/tensorflow1/free_chat/chinese_lccc/main')
In [2]:
%tensorflow_version 1.x
!pip install bert4keras
TensorFlow 1.x selected.
Collecting bert4keras
  Downloading https://files.pythonhosted.org/packages/54/71/9610242a003336a67b8a13162e937e0c0da9ba6daf5821e17cc96e6aa4b2/bert4keras-0.8.7.tar.gz (40kB)
     |████████████████████████████████| 40kB 2.8MB/s 
Requirement already satisfied: keras<=2.3.1 in /tensorflow-1.15.2/python3.6 (from bert4keras) (2.3.1)
Requirement already satisfied: keras-applications>=1.0.6 in /tensorflow-1.15.2/python3.6 (from keras<=2.3.1->bert4keras) (1.0.8)
Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from keras<=2.3.1->bert4keras) (3.13)
Requirement already satisfied: numpy>=1.9.1 in /usr/local/lib/python3.6/dist-packages (from keras<=2.3.1->bert4keras) (1.18.5)
Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras<=2.3.1->bert4keras) (2.10.0)
Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from keras<=2.3.1->bert4keras) (1.15.0)
Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from keras<=2.3.1->bert4keras) (1.4.1)
Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from keras<=2.3.1->bert4keras) (1.1.2)
Building wheels for collected packages: bert4keras
  Building wheel for bert4keras (setup.py) ... done
  Created wheel for bert4keras: filename=bert4keras-0.8.7-cp36-none-any.whl size=39155 sha256=c279e58fd439683b079219e40834f2c2c645cb8949019e785e53ebc787ec4f8d
  Stored in directory: /root/.cache/pip/wheels/53/71/4c/3c37d5b70183ce174cbf51700bef11a3dcd11370ccf47052ff
Successfully built bert4keras
Installing collected packages: bert4keras
Successfully installed bert4keras-0.8.7
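As a quick sanity check after the install (a minimal sketch, not part of the original notebook; it assumes bert4keras exposes __version__, as this release line does), the pinned runtime can be verified before loading the model:

import tensorflow as tf
import bert4keras
print(tf.__version__)          # expected: 1.15.x under %tensorflow_version 1.x
print(bert4keras.__version__)  # expected: 0.8.7, as installed above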
In [3]:
import numpy as np
from bert4keras.models import build_transformer_model
from bert4keras.tokenizers import Tokenizer
from bert4keras.snippets import AutoRegressiveDecoder
from bert4keras.snippets import uniout
Using TensorFlow backend.
In [4]:
# Paths to the pretrained NEZHA GPT dialogue checkpoint
config_path = '../model/nezha_gpt_dialog/config.json'
checkpoint_path = '../model/nezha_gpt_dialog/model.ckpt'
dict_path = '../model/nezha_gpt_dialog/vocab.txt'

tokenizer = Tokenizer(dict_path, do_lower_case=True)

# application='lm' applies a lower-triangular (unified LM) attention mask,
# so the NEZHA encoder can be used as an autoregressive generator
model = build_transformer_model(
    config_path,
    checkpoint_path,
    model='nezha',
    application='lm',
)
model.summary()
WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.
Instructions for updating:
If using Keras pass *_constraint arguments to layers.
Model: "model_1"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
Input-Token (InputLayer)        (None, None)         0                                            
__________________________________________________________________________________________________
Input-Segment (InputLayer)      (None, None)         0                                            
__________________________________________________________________________________________________
Embedding-Token (Embedding)     multiple             10901760    Input-Token[0][0]                
                                                                 MLM-Norm[0][0]                   
__________________________________________________________________________________________________
Embedding-Segment (Embedding)   (None, None, 768)    1536        Input-Segment[0][0]              
__________________________________________________________________________________________________
Embedding-Token-Segment (Add)   (None, None, 768)    0           Embedding-Token[0][0]            
                                                                 Embedding-Segment[0][0]          
__________________________________________________________________________________________________
Embedding-Norm (LayerNormalizat (None, None, 768)    1536        Embedding-Token-Segment[0][0]    
__________________________________________________________________________________________________
Embedding-Dropout (Dropout)     (None, None, 768)    0           Embedding-Norm[0][0]             
__________________________________________________________________________________________________
Attention-LM-Mask (Lambda)      (1, 1, None, None)   0           Input-Token[0][0]                
__________________________________________________________________________________________________
Embedding-Relative-Position (Re (None, None, 64)     8256        Embedding-Dropout[0][0]          
                                                                 Embedding-Dropout[0][0]          
__________________________________________________________________________________________________
Transformer-0-MultiHeadSelfAtte (None, None, 768)    2362368     Embedding-Dropout[0][0]          
                                                                 Embedding-Dropout[0][0]          
                                                                 Embedding-Dropout[0][0]          
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-0-MultiHeadSelfAtte (None, None, 768)    0           Transformer-0-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-0-MultiHeadSelfAtte (None, None, 768)    0           Embedding-Dropout[0][0]          
                                                                 Transformer-0-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-0-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-0-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-0-FeedForward (Feed (None, None, 768)    4722432     Transformer-0-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-0-FeedForward-Dropo (None, None, 768)    0           Transformer-0-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-0-FeedForward-Add ( (None, None, 768)    0           Transformer-0-MultiHeadSelfAttent
                                                                 Transformer-0-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-0-FeedForward-Norm  (None, None, 768)    1536        Transformer-0-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-1-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-0-FeedForward-Norm[0]
                                                                 Transformer-0-FeedForward-Norm[0]
                                                                 Transformer-0-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-1-MultiHeadSelfAtte (None, None, 768)    0           Transformer-1-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-1-MultiHeadSelfAtte (None, None, 768)    0           Transformer-0-FeedForward-Norm[0]
                                                                 Transformer-1-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-1-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-1-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-1-FeedForward (Feed (None, None, 768)    4722432     Transformer-1-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-1-FeedForward-Dropo (None, None, 768)    0           Transformer-1-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-1-FeedForward-Add ( (None, None, 768)    0           Transformer-1-MultiHeadSelfAttent
                                                                 Transformer-1-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-1-FeedForward-Norm  (None, None, 768)    1536        Transformer-1-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-2-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-1-FeedForward-Norm[0]
                                                                 Transformer-1-FeedForward-Norm[0]
                                                                 Transformer-1-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-2-MultiHeadSelfAtte (None, None, 768)    0           Transformer-2-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-2-MultiHeadSelfAtte (None, None, 768)    0           Transformer-1-FeedForward-Norm[0]
                                                                 Transformer-2-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-2-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-2-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-2-FeedForward (Feed (None, None, 768)    4722432     Transformer-2-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-2-FeedForward-Dropo (None, None, 768)    0           Transformer-2-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-2-FeedForward-Add ( (None, None, 768)    0           Transformer-2-MultiHeadSelfAttent
                                                                 Transformer-2-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-2-FeedForward-Norm  (None, None, 768)    1536        Transformer-2-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-3-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-2-FeedForward-Norm[0]
                                                                 Transformer-2-FeedForward-Norm[0]
                                                                 Transformer-2-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-3-MultiHeadSelfAtte (None, None, 768)    0           Transformer-3-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-3-MultiHeadSelfAtte (None, None, 768)    0           Transformer-2-FeedForward-Norm[0]
                                                                 Transformer-3-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-3-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-3-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-3-FeedForward (Feed (None, None, 768)    4722432     Transformer-3-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-3-FeedForward-Dropo (None, None, 768)    0           Transformer-3-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-3-FeedForward-Add ( (None, None, 768)    0           Transformer-3-MultiHeadSelfAttent
                                                                 Transformer-3-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-3-FeedForward-Norm  (None, None, 768)    1536        Transformer-3-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-4-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-3-FeedForward-Norm[0]
                                                                 Transformer-3-FeedForward-Norm[0]
                                                                 Transformer-3-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-4-MultiHeadSelfAtte (None, None, 768)    0           Transformer-4-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-4-MultiHeadSelfAtte (None, None, 768)    0           Transformer-3-FeedForward-Norm[0]
                                                                 Transformer-4-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-4-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-4-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-4-FeedForward (Feed (None, None, 768)    4722432     Transformer-4-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-4-FeedForward-Dropo (None, None, 768)    0           Transformer-4-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-4-FeedForward-Add ( (None, None, 768)    0           Transformer-4-MultiHeadSelfAttent
                                                                 Transformer-4-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-4-FeedForward-Norm  (None, None, 768)    1536        Transformer-4-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-5-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-4-FeedForward-Norm[0]
                                                                 Transformer-4-FeedForward-Norm[0]
                                                                 Transformer-4-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-5-MultiHeadSelfAtte (None, None, 768)    0           Transformer-5-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-5-MultiHeadSelfAtte (None, None, 768)    0           Transformer-4-FeedForward-Norm[0]
                                                                 Transformer-5-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-5-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-5-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-5-FeedForward (Feed (None, None, 768)    4722432     Transformer-5-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-5-FeedForward-Dropo (None, None, 768)    0           Transformer-5-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-5-FeedForward-Add ( (None, None, 768)    0           Transformer-5-MultiHeadSelfAttent
                                                                 Transformer-5-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-5-FeedForward-Norm  (None, None, 768)    1536        Transformer-5-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-6-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-5-FeedForward-Norm[0]
                                                                 Transformer-5-FeedForward-Norm[0]
                                                                 Transformer-5-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-6-MultiHeadSelfAtte (None, None, 768)    0           Transformer-6-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-6-MultiHeadSelfAtte (None, None, 768)    0           Transformer-5-FeedForward-Norm[0]
                                                                 Transformer-6-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-6-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-6-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-6-FeedForward (Feed (None, None, 768)    4722432     Transformer-6-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-6-FeedForward-Dropo (None, None, 768)    0           Transformer-6-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-6-FeedForward-Add ( (None, None, 768)    0           Transformer-6-MultiHeadSelfAttent
                                                                 Transformer-6-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-6-FeedForward-Norm  (None, None, 768)    1536        Transformer-6-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-7-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-6-FeedForward-Norm[0]
                                                                 Transformer-6-FeedForward-Norm[0]
                                                                 Transformer-6-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-7-MultiHeadSelfAtte (None, None, 768)    0           Transformer-7-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-7-MultiHeadSelfAtte (None, None, 768)    0           Transformer-6-FeedForward-Norm[0]
                                                                 Transformer-7-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-7-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-7-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-7-FeedForward (Feed (None, None, 768)    4722432     Transformer-7-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-7-FeedForward-Dropo (None, None, 768)    0           Transformer-7-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-7-FeedForward-Add ( (None, None, 768)    0           Transformer-7-MultiHeadSelfAttent
                                                                 Transformer-7-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-7-FeedForward-Norm  (None, None, 768)    1536        Transformer-7-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-8-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-7-FeedForward-Norm[0]
                                                                 Transformer-7-FeedForward-Norm[0]
                                                                 Transformer-7-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-8-MultiHeadSelfAtte (None, None, 768)    0           Transformer-8-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-8-MultiHeadSelfAtte (None, None, 768)    0           Transformer-7-FeedForward-Norm[0]
                                                                 Transformer-8-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-8-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-8-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-8-FeedForward (Feed (None, None, 768)    4722432     Transformer-8-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-8-FeedForward-Dropo (None, None, 768)    0           Transformer-8-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-8-FeedForward-Add ( (None, None, 768)    0           Transformer-8-MultiHeadSelfAttent
                                                                 Transformer-8-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-8-FeedForward-Norm  (None, None, 768)    1536        Transformer-8-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-9-MultiHeadSelfAtte (None, None, 768)    2362368     Transformer-8-FeedForward-Norm[0]
                                                                 Transformer-8-FeedForward-Norm[0]
                                                                 Transformer-8-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-9-MultiHeadSelfAtte (None, None, 768)    0           Transformer-9-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-9-MultiHeadSelfAtte (None, None, 768)    0           Transformer-8-FeedForward-Norm[0]
                                                                 Transformer-9-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-9-MultiHeadSelfAtte (None, None, 768)    1536        Transformer-9-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-9-FeedForward (Feed (None, None, 768)    4722432     Transformer-9-MultiHeadSelfAttent
__________________________________________________________________________________________________
Transformer-9-FeedForward-Dropo (None, None, 768)    0           Transformer-9-FeedForward[0][0]  
__________________________________________________________________________________________________
Transformer-9-FeedForward-Add ( (None, None, 768)    0           Transformer-9-MultiHeadSelfAttent
                                                                 Transformer-9-FeedForward-Dropout
__________________________________________________________________________________________________
Transformer-9-FeedForward-Norm  (None, None, 768)    1536        Transformer-9-FeedForward-Add[0][
__________________________________________________________________________________________________
Transformer-10-MultiHeadSelfAtt (None, None, 768)    2362368     Transformer-9-FeedForward-Norm[0]
                                                                 Transformer-9-FeedForward-Norm[0]
                                                                 Transformer-9-FeedForward-Norm[0]
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-10-MultiHeadSelfAtt (None, None, 768)    0           Transformer-10-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-10-MultiHeadSelfAtt (None, None, 768)    0           Transformer-9-FeedForward-Norm[0]
                                                                 Transformer-10-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-10-MultiHeadSelfAtt (None, None, 768)    1536        Transformer-10-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-10-FeedForward (Fee (None, None, 768)    4722432     Transformer-10-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-10-FeedForward-Drop (None, None, 768)    0           Transformer-10-FeedForward[0][0] 
__________________________________________________________________________________________________
Transformer-10-FeedForward-Add  (None, None, 768)    0           Transformer-10-MultiHeadSelfAtten
                                                                 Transformer-10-FeedForward-Dropou
__________________________________________________________________________________________________
Transformer-10-FeedForward-Norm (None, None, 768)    1536        Transformer-10-FeedForward-Add[0]
__________________________________________________________________________________________________
Transformer-11-MultiHeadSelfAtt (None, None, 768)    2362368     Transformer-10-FeedForward-Norm[0
                                                                 Transformer-10-FeedForward-Norm[0
                                                                 Transformer-10-FeedForward-Norm[0
                                                                 Attention-LM-Mask[0][0]          
                                                                 Embedding-Relative-Position[0][0]
__________________________________________________________________________________________________
Transformer-11-MultiHeadSelfAtt (None, None, 768)    0           Transformer-11-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-11-MultiHeadSelfAtt (None, None, 768)    0           Transformer-10-FeedForward-Norm[0
                                                                 Transformer-11-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-11-MultiHeadSelfAtt (None, None, 768)    1536        Transformer-11-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-11-FeedForward (Fee (None, None, 768)    4722432     Transformer-11-MultiHeadSelfAtten
__________________________________________________________________________________________________
Transformer-11-FeedForward-Drop (None, None, 768)    0           Transformer-11-FeedForward[0][0] 
__________________________________________________________________________________________________
Transformer-11-FeedForward-Add  (None, None, 768)    0           Transformer-11-MultiHeadSelfAtten
                                                                 Transformer-11-FeedForward-Dropou
__________________________________________________________________________________________________
Transformer-11-FeedForward-Norm (None, None, 768)    1536        Transformer-11-FeedForward-Add[0]
__________________________________________________________________________________________________
MLM-Dense (Dense)               (None, None, 768)    590592      Transformer-11-FeedForward-Norm[0
__________________________________________________________________________________________________
MLM-Norm (LayerNormalization)   (None, None, 768)    1536        MLM-Dense[0][0]                  
__________________________________________________________________________________________________
MLM-Bias (BiasAdd)              (None, None, 14195)  14195       Embedding-Token[1][0]            
__________________________________________________________________________________________________
MLM-Activation (Activation)     (None, None, 14195)  0           MLM-Bias[0][0]                   
==================================================================================================
Total params: 96,573,875
Trainable params: 96,565,619
Non-trainable params: 8,256
__________________________________________________________________________________________________
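The summary shows the 12-layer NEZHA encoder wired as a unified language model: relative-position self-attention under an LM mask, with an MLM head (MLM-Dense → MLM-Norm → MLM-Bias → MLM-Activation) that reuses Embedding-Token and emits a softmax over the 14,195-token vocabulary at every position. As a minimal sketch (not part of the original notebook; it only assumes the model and tokenizer built above), the last position of the output can be read directly as a next-token distribution:

# Inspect the next-token distribution for a short prompt. model.predict
# returns (batch, seq_len, vocab) probabilities; the last position is the
# distribution over the token that would follow the prompt.
prompt_ids, prompt_segs = tokenizer.encode('你好')
probs = model.predict([np.array([prompt_ids]), np.array([prompt_segs])])[0, -1]
top5 = probs.argsort()[-5:][::-1]
print([(tokenizer.decode([int(i)]), round(float(probs[i]), 4)) for i in top5])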
In [5]:
class ChatBot(AutoRegressiveDecoder):
    """Random-sampling chatbot on top of the NEZHA dialogue LM."""

    @AutoRegressiveDecoder.wraps(default_rtype='probas')
    def predict(self, inputs, output_ids, states):
        token_ids, segment_ids = inputs
        # Append the tokens generated so far; they get the opposite segment id
        # of the last input token, so speaker turns keep alternating.
        token_ids = np.concatenate([token_ids, output_ids], 1)
        curr_segment_ids = np.ones_like(output_ids) - segment_ids[0, -1]
        segment_ids = np.concatenate([segment_ids, curr_segment_ids], 1)
        # Only the distribution for the next token (last position) is needed.
        return model.predict([token_ids, segment_ids])[:, -1]

    def response(self, texts, topk=5):
        # Encode the dialogue history as [CLS] utt1 [SEP] utt2 [SEP] ...,
        # with segment ids alternating 0/1 per utterance.
        token_ids, segment_ids = [tokenizer._token_start_id], [0]
        for i, text in enumerate(texts):
            ids = tokenizer.encode(text)[0][1:]  # drop the leading [CLS]
            token_ids.extend(ids)
            segment_ids.extend([i % 2] * len(ids))
        # Draw one reply with top-k sampling and decode it back to text.
        results = self.random_sample([token_ids, segment_ids], 1, topk)
        return tokenizer.decode(results[0])
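For comparison, the same class can be made deterministic by swapping top-k sampling for beam search; a hedged sketch, not from the original notebook, assuming the beam_search(inputs, topk) method that AutoRegressiveDecoder provides in this bert4keras version:

class GreedyChatBot(ChatBot):
    """Deterministic variant: beam search instead of top-k random sampling."""
    def response(self, texts, topk=5):
        token_ids, segment_ids = [tokenizer._token_start_id], [0]
        for i, text in enumerate(texts):
            ids = tokenizer.encode(text)[0][1:]
            token_ids.extend(ids)
            segment_ids.extend([i % 2] * len(ids))
        # beam_search returns the single highest-scoring token sequence
        best = self.beam_search([token_ids, segment_ids], topk)
        return tokenizer.decode(best)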
In [6]:
# start_id=None: decoding continues straight from the encoded history;
# generation stops at [SEP] (tokenizer._token_end_id) or after 32 tokens
chatbot = ChatBot(start_id=None, end_id=tokenizer._token_end_id, maxlen=32)

query_li = [
  '你好',
  '早上好',
  '晚上好',
  '再见',
  '好久不见',
  '想死你了',
  '谢谢你',
  '爱你',
  '你叫什么名字',
  '你几岁了',
  '现在几点了',
  '今天天气怎么样',
  '我们现在在哪里',
  '你能给我讲个笑话吗',
  '你是男孩还是女孩呀',
  '你会几种语言呀',
  '你能陪我玩吗',
  '说话可以大声一点吗',
  '天气真好',
  '天气太糟糕了',
  '下雨了',
  '雨好大',
  '我讨厌艳阳天',
  '好晒啊',
  '今天好冷',
  '今天好热',
  '风好大',
  '雾太大了看不清路',
  '打雷了好可怕',
  '下雪了诶',
  '好烦啊',
  '好开心',
  '太激动了',
  '我好难过',
  '我想哭',
  '太好笑了',
  '我好伤心',
  '心好痛',
  '好累啊',
  '我好疲惫',
  '我爱你',
  '我讨厌你',
  '你真是太棒啦',
  '你好厉害啊',
  '吓死我了',
  '我想回家',
  '我想爸妈了',
  '不知道小孩在家有没有听话',
  '想回家撸猫',
]
for q in query_li:
  print('Q:', q)
  print('A:', chatbot.response([q]))
  print()
Q: 你好
WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/keras/backend/tensorflow_backend.py:422: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.

A: 您好

Q: 早上好
A: 早啊早啊

Q: 晚上好
A: 晚上好!

Q: 再见
A: 我要走了

Q: 好久不见
A: 是啊,最近怎么样啊

Q: 想死你了
A: 哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈

Q: 谢谢你
A: 不谢不谢,

Q: 爱你
A: 我也爱你,你最美

Q: 你叫什么名字
A: 你是不是想说我的名字太俗了

Q: 你几岁了
A: 我不知道

Q: 现在几点了
A: 我也不知道啊

Q: 今天天气怎么样
A: 不错

Q: 我们现在在哪里
A: 在我们宿舍楼下。。。

Q: 你能给我讲个笑话吗
A: 不能

Q: 你是男孩还是女孩呀
A: 我是萌妹纸

Q: 你会几种语言呀
A: 只会说一句

Q: 你能陪我玩吗
A: 你猜

Q: 说话可以大声一点吗
A: 我不

Q: 天气真好
A: 是啊。

Q: 天气太糟糕了
A: 是的

Q: 下雨了
A: 我也是

Q: 雨好大
A: 对呀,我们还穿短袖

Q: 我讨厌艳阳天
A: 我喜欢

Q: 好晒啊
A: 是啊,不是很热的

Q: 今天好冷
A: 是啊。

Q: 今天好热
A: 对啊,我都热疯

Q: 风好大
A: 嗯嗯嗯,风好大,冷

Q: 雾太大了看不清路
A: 你是不是傻啊

Q: 打雷了好可怕
A: 我们这边下雨了

Q: 下雪了诶
A: 下了一天了

Q: 好烦啊
A: 我也是

Q: 好开心
A: 哈哈哈,我也是!

Q: 太激动了
A: 哈哈

Q: 我好难过
A: 你怎么难过

Q: 我想哭
A: 哭吧哭吧,我陪你哭

Q: 太好笑了
A: 哈哈

Q: 我好伤心
A: 你是不是想笑死我!!下次再给你拍

Q: 心好痛
A: 怎么了

Q: 好累啊
A: 累了就睡觉

Q: 我好疲惫
A: 你不是一直都很疲惫么

Q: 我爱你
A: 爱我就来见面

Q: 我讨厌你
A: 我也讨厌你

Q: 你真是太棒啦
A: 谢谢!

Q: 你好厉害啊
A: 你也可以的

Q: 吓死我了
A: 哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈哈笑死我了

Q: 我想回家
A: 回来吧,家真的好啊!

Q: 我想爸妈了
A: 想我吗

Q: 不知道小孩在家有没有听话
A: 我也是听不懂啊

Q: 想回家撸猫
A: 回呀回呀
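
Because response() consumes the whole dialogue history (segment ids alternate per utterance), multi-turn chat only requires appending each sampled reply before the next query. A usage sketch under the same assumptions as above; replies are drawn with top-k sampling, so actual output will vary:

history = []
for user_utterance in ['好久不见', '今天天气怎么样']:
    history.append(user_utterance)          # user turn
    reply = chatbot.response(history)       # model turn, conditioned on history
    history.append(reply)
    print('Q:', user_utterance)
    print('A:', reply)
    print()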