#!/usr/bin/env python
# coding: utf-8

# In[3]:


# %load /Users/facai/Study/book_notes/preconfig.py
get_ipython().run_line_magic('matplotlib', 'inline')

import matplotlib.pyplot as plt
import seaborn as sns
sns.set(color_codes=True)
sns.set(font='SimHei', font_scale=2.5)

plt.rcParams['axes.grid'] = False

import tensorflow as tf


def show_image(filename, figsize=None, res_dir=True):
    if figsize:
        plt.figure(figsize=figsize)

    if res_dir:
        filename = './res/{}'.format(filename)

    plt.imshow(plt.imread(filename))


# A Brief Introduction to TensorFlow Graphs
# ============
#
# Reference: https://www.tensorflow.org/programmers_guide/graphs
#
# 1. tf.Graph
# 2. op, tensor
# 3. variable
# 4. name_scope, variable_scope, collection
# 5. save and restore

# ![](https://www.tensorflow.org/images/tensors_flowing.gif)

# ### 0. tf.Graph
#
# tf.Graph: GraphDef => *.pb file
# + Graph structure: Operators, Tensor-like objects, and how they connect
# + Graph collections: metadata
#
# tf.Session():
# + local
# + distributed: master (worker_0)
#
# ```python
# with tf.Session("grpc://example.org:2222"):
#     pass
# ```
#
# State (Variables) => *.ckpt file

# ### 1. Operators and Tensors

# In[6]:


a = tf.constant(1)
b = a * 2
b


# In[7]:


b.op


# In[11]:


b.consumers()


# In[15]:


a.op


# In[19]:


a.consumers()


# tensorflow/python/framework/ops.py
# + Tensor:
#     - device
#     - graph
#     - op
#     - consumers
#     - _override_operator: math operators, e.g. `math_ops.add` overloads `__add__`

# In[8]:


b.op.outputs


# In[9]:


list(b.op.inputs)


# In[14]:


print(b.op.inputs[0])
print(a)


# In[17]:


list(a.op.inputs)


# + Operator: NodeDef
#     - device
#     - inputs
#     - outputs
#     - graph
#     - node_def
#     - op_def
#     - **run**
#     - traceback
#
# Operators and Tensors together form the dataflow graph (a directed graph).
#
# ```python
# # run
# sess.run([b])
# ```
#
# References:
# + tf.Tensor: https://www.tensorflow.org/versions/master/api_docs/python/tf/Tensor
# + tf.Operation: https://www.tensorflow.org/versions/master/api_docs/python/tf/Operation

# ### 2. Variables

# In[20]:


v = tf.Variable([0])
c = b + v
c


# In[23]:


list(c.op.inputs)


# In[25]:


c.op.inputs[1].op


# In[26]:


list(c.op.inputs[1].op.inputs)


# In[21]:


v


# In fact, reading a variable goes through a `tf.identity` op:
# ```python
# c = tf.add(b, tf.identity(v))
# ```
#
# + Variable: acts like a Tensor
#     - [ops](https://stackoverflow.com/questions/40817665/whats-the-difference-between-variable-and-resourcevariable-in-tensorflow)
#         1. VariableV2
#         2. ResourceVariable
#     - _AsTensor -> g.as_graph_element
#     - value: Identity(variable) -> Tensor
#     - assign
#     - init_op: Assign(self, init_value)
#     - to_proto: VariableDef
#
# Reference: https://www.tensorflow.org/versions/master/api_docs/python/tf/Variable

# ### 3. collections
#
# + collections: group graph elements by purpose
#     - Variables: global_variables
#     - see [tf.GraphKeys](https://www.tensorflow.org/versions/master/api_docs/python/tf/GraphKeys) for more keys
# + name_scope: groups Operators and Tensors
# + variable_scope: groups Variables
#     - also opens a companion name_scope (see the sketch below)
#
# ```python
# class Layer:
#     def build(self):
#         pass
#     def call(self, inputs):
#         pass
# ```
#
# ![](https://www.tensorflow.org/images/mnist_deep.png)
#
# Reference: https://www.tensorflow.org/programmers_guide/summaries_and_tensorboard
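# The scoping rules above show up directly in tensor and op names. Below is a minimal sketch
# (not part of the original notebook; it only assumes the TensorFlow 1.x API already imported
# as `tf`): `variable_scope` also opens a name_scope and registers its variables in the
# `GLOBAL_VARIABLES` collection, while a plain `name_scope` prefixes op names but is ignored
# by `tf.get_variable`.
#
# ```python
# g = tf.Graph()
# with g.as_default():
#     with tf.variable_scope("layer1"):           # also opens name_scope "layer1"
#         w = tf.get_variable("w", shape=[2, 2])  # -> "layer1/w:0", added to GLOBAL_VARIABLES
#         y = tf.reduce_sum(w)                    # op name -> "layer1/Sum"
#     with tf.name_scope("debug"):
#         v = tf.get_variable("v", shape=[1])     # name_scope ignored -> "v:0"
#         z = v * 2                               # op name -> "debug/mul"
#
#     print(w.name, y.op.name)                    # layer1/w:0  layer1/Sum
#     print(v.name, z.op.name)                    # v:0  debug/mul
#     print(g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))  # both w and v
# ```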
# ### 4. Save and Restore

# In[58]:


graph_a = tf.Graph()
with graph_a.as_default():
    v1 = tf.get_variable("v1", shape=[3], initializer=tf.zeros_initializer)
    print(v1)
    inc_v1 = v1.assign(v1 + 1)
    init_op = tf.global_variables_initializer()

    saver = tf.train.Saver()

    with tf.Session() as sess:
        sess.run(init_op)
        inc_v1.op.run()

        # Checkpoint: variable values plus the MetaGraph (write_meta_graph=True).
        save_path = saver.save(sess, "./tmp/model.ckpt", write_meta_graph=True)
        print("Model saved in path: %s" % save_path)

        # GraphDef only, written as text.
        pb_path = tf.train.write_graph(graph_a.as_graph_def(), "./tmp/", "graph.pbtxt", as_text=True)
        print("Graph saved in path: %s" % pb_path)


# An excerpt of graph.pbtxt, showing the node created for `v1 + 1`:
#
# ```
# node {
#   name: "add"
#   op: "Add"
#   input: "v1/read"
#   input: "add/y"
#   attr {
#     key: "T"
#     value {
#       type: DT_FLOAT
#     }
#   }
# }
# ```

# In[62]:


graph_b = tf.Graph()
with graph_b.as_default():
    with tf.Session() as sess:
        # Rebuild the graph structure from the MetaGraph, then restore the variable values.
        saver = tf.train.import_meta_graph('./tmp/model.ckpt.meta')
        saver.restore(sess, "./tmp/model.ckpt")
        print(graph_b.get_operations())

        v1 = graph_b.get_tensor_by_name("v1:0")
        print("------------------")
        print("v1 : %s" % v1.eval(session=sess))


# Summary:
# + `tf.train.Saver` saves both the GraphDef (as a MetaGraph) and the Variable values, so a graph can be restored from it directly.
#     - [tf.train.import_meta_graph](https://www.tensorflow.org/versions/master/api_docs/python/tf/train/import_meta_graph)
#     - [Exporting and Importing a MetaGraph](https://www.tensorflow.org/versions/master/api_guides/python/meta_graph)
#     - Drawback: the input tensors cannot be remapped.
# + `tf.train.write_graph`, `tf.GraphDef` and `tf.import_graph_def` are mainly used for freezing a model (GraphDef only, no variable state); see the sketch below.
#
# References:
# + https://stackoverflow.com/questions/38641887/how-to-save-a-trained-tensorflow-model-for-later-use-for-application
# + https://www.tensorflow.org/programmers_guide/saved_model#overview_of_saving_and_restoring_models
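# The second route in the summary can be sketched as follows. This is not from the original
# notebook; it assumes the TF 1.x `tf.graph_util.convert_variables_to_constants` and
# `tf.import_graph_def` APIs, and the names `"out"` / `"out:0"` are made up for the example.
# Variable values are folded into constants, so the frozen GraphDef alone is enough to rebuild
# and run the graph (and `tf.import_graph_def`'s `input_map` argument can rebind inputs):
#
# ```python
# import tensorflow as tf
#
# # Freeze: fold the current variable values into constants -> pure GraphDef.
# graph_c = tf.Graph()
# with graph_c.as_default():
#     v1 = tf.get_variable("v1", shape=[3], initializer=tf.zeros_initializer)
#     out = tf.identity(v1 + 1, name="out")
#     with tf.Session() as sess:
#         sess.run(tf.global_variables_initializer())
#         frozen_def = tf.graph_util.convert_variables_to_constants(
#             sess, graph_c.as_graph_def(), ["out"])
#
# # Reload from the GraphDef alone; no checkpoint is needed any more.
# graph_d = tf.Graph()
# with graph_d.as_default():
#     out_restored, = tf.import_graph_def(frozen_def, return_elements=["out:0"], name="")
#     with tf.Session() as sess:
#         print(sess.run(out_restored))  # [1. 1. 1.]
# ```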