Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.

In [1]:
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p tensorflow
Sebastian Raschka 

CPython 3.7.3
IPython 7.6.1

tensorflow 1.13.1

Convolutional Generative Adversarial Networks with Label Smoothing

Same as ./gan-conv.ipynb but with label smoothing.

Here, the label smoothing approach is to replace the labels of real images (1's) with 0.9, based on the idea in

  • Salimans, Tim, Ian Goodfellow, Wojciech Zaremba, Vicki Cheung, Alec Radford, and Xi Chen. "Improved techniques for training GANs." In Advances in Neural Information Processing Systems, pp. 2234-2242. 2016.
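
As a quick, self-contained illustration (separate from the graph built below, but assuming the same TensorFlow 1.x API), the only change smoothing introduces is that the real-image targets passed to tf.nn.sigmoid_cross_entropy_with_logits are 0.9 instead of 1.0; the logit values in this sketch are made-up placeholders:

import tensorflow as tf

# hypothetical discriminator logits for two real and two generated images
real_logits = tf.constant([[2.0], [1.5]])
fake_logits = tf.constant([[-1.0], [0.5]])

smooth = 0.9  # smoothed target for real images (instead of 1.0)

# real images: target 0.9; generated images: target 0.0 (unchanged)
real_loss = tf.nn.sigmoid_cross_entropy_with_logits(
    logits=real_logits, labels=tf.ones_like(real_logits) * smooth)
fake_loss = tf.nn.sigmoid_cross_entropy_with_logits(
    logits=fake_logits, labels=tf.zeros_like(fake_logits))

dis_cost = tf.reduce_mean(real_loss) + tf.reduce_mean(fake_loss)

with tf.Session() as sess:
    print(sess.run(dis_cost))

The discriminator loss in the graph below follows the same pattern, with the logits coming from the convolutional discriminator instead of constants.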
In [2]:
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import pickle as pkl

tf.test.gpu_device_name()
Out[2]:
'/device:GPU:0'
In [3]:
### Abbreviations
# dis_*: discriminator network
# gen_*: generator network

########################
### Helper functions
########################

def leaky_relu(x, alpha=0.0001):
    return tf.maximum(alpha * x, x)


########################
### DATASET
########################

mnist = input_data.read_data_sets('MNIST_data')


#########################
### SETTINGS
#########################

# Hyperparameters
learning_rate = 0.001
training_epochs = 50
batch_size = 64
dropout_rate = 0.5

# Architecture
dis_input_size = 784
gen_input_size = 100

# Other settings
print_interval = 200

#########################
### GRAPH DEFINITION
#########################

g = tf.Graph()
with g.as_default():
    
    # Placeholders for settings
    # (note: the 'dropout' placeholder is fed during training, but the dropout
    #  layers below use the constant dropout_rate)
    dropout = tf.placeholder(tf.float32, shape=None, name='dropout')
    is_training = tf.placeholder(tf.bool, shape=None, name='is_training')
    
    # Input data
    dis_x = tf.placeholder(tf.float32, shape=[None, dis_input_size],
                           name='discriminator_inputs')     
    gen_x = tf.placeholder(tf.float32, [None, gen_input_size],
                           name='generator_inputs')


    ##################
    # Generator Model
    ##################

    with tf.variable_scope('generator'):
        
        # 100 => 784 => 7x7x64
        gen_fc = tf.layers.dense(inputs=gen_x, units=3136,
                                 bias_initializer=None, # no bias required when using batch_norm
                                 activation=None)
        gen_fc = tf.layers.batch_normalization(gen_fc, training=is_training)
        gen_fc = leaky_relu(gen_fc)
        gen_fc = tf.reshape(gen_fc, (-1, 7, 7, 64))
        
        # 7x7x64 => 14x14x32
        deconv1 = tf.layers.conv2d_transpose(gen_fc, filters=32, 
                                             kernel_size=(3, 3), strides=(2, 2), 
                                             padding='same',
                                             bias_initializer=None,
                                             activation=None)
        deconv1 = tf.layers.batch_normalization(deconv1, training=is_training)
        deconv1 = leaky_relu(deconv1)     
        deconv1 = tf.layers.dropout(deconv1, rate=dropout_rate)
        
        # 14x14x32 => 28x28x16
        deconv2 = tf.layers.conv2d_transpose(deconv1, filters=16, 
                                             kernel_size=(3, 3), strides=(2, 2), 
                                             padding='same',
                                             bias_initializer=None,
                                             activation=None)
        deconv2 = tf.layers.batch_normalization(deconv2, training=is_training)
        deconv2 = leaky_relu(deconv2)     
        deconv2 = tf.layers.dropout(deconv2, rate=dropout_rate)
        
        # 28x28x16 => 28x28x8
        deconv3 = tf.layers.conv2d_transpose(deconv2, filters=8, 
                                             kernel_size=(3, 3), strides=(1, 1), 
                                             padding='same',
                                             bias_initializer=None,
                                             activation=None)
        deconv3 = tf.layers.batch_normalization(deconv3, training=is_training)
        deconv3 = leaky_relu(deconv3)     
        deconv3 = tf.layers.dropout(deconv3, rate=dropout_rate)
        
        # 28x28x8 => 28x28x1
        gen_logits = tf.layers.conv2d_transpose(deconv3, filters=1, 
                                                kernel_size=(3, 3), strides=(1, 1), 
                                                padding='same',
                                                bias_initializer=None,
                                                activation=None)
        gen_out = tf.tanh(gen_logits, 'generator_outputs')


    ######################
    # Discriminator Model
    ######################
    
    def build_discriminator_graph(input_x, reuse=None):

        with tf.variable_scope('discriminator', reuse=reuse):
            
            # 28x28x1 => 14x14x8
            conv_input = tf.reshape(input_x, (-1, 28, 28, 1))
            conv1 = tf.layers.conv2d(conv_input, filters=8, kernel_size=(3, 3),
                                     strides=(2, 2), padding='same',
                                     bias_initializer=None,
                                     activation=None)
            conv1 = tf.layers.batch_normalization(conv1, training=is_training)
            conv1 = leaky_relu(conv1)
            conv1 = tf.layers.dropout(conv1, rate=dropout_rate)
            
            # 14x14x8 => 7x7x32
            conv2 = tf.layers.conv2d(conv1, filters=32, kernel_size=(3, 3),
                                     strides=(2, 2), padding='same',
                                     bias_initializer=None,
                                     activation=None)
            conv2 = tf.layers.batch_normalization(conv2, training=is_training)
            conv2 = leaky_relu(conv2)
            conv2 = tf.layers.dropout(conv2, rate=dropout_rate)

            # fully connected layer
            fc_input = tf.reshape(conv2, (-1, 7*7*32))
            logits = tf.layers.dense(inputs=fc_input, units=1, activation=None)
            out = tf.sigmoid(logits)
            
        return logits, out    

    # Build the discriminator for real data and reuse its weights for the generated (fake) data
    dis_real_logits, dis_real_out = build_discriminator_graph(dis_x, reuse=False)
    dis_fake_logits, dis_fake_out = build_discriminator_graph(gen_out, reuse=True)


    #####################################
    # Generator and Discriminator Losses
    #####################################
    
    # Two discriminator cost components: loss on real data + loss on fake data
    # Real data has class label 1 (smoothed to 0.9 below), fake data has class label 0
    dis_real_loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=dis_real_logits, 
                                                            labels=tf.ones_like(dis_real_logits) * 0.9)
    dis_fake_loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=dis_fake_logits, 
                                                            labels=tf.zeros_like(dis_fake_logits))
    dis_cost = tf.add(tf.reduce_mean(dis_fake_loss), 
                      tf.reduce_mean(dis_real_loss), 
                      name='discriminator_cost')
 
    # Generator cost: cross-entropy between the discriminator's prediction on
    # generated images and the (smoothed) "real" label 0.9
    gen_loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=dis_fake_logits,
                                                       labels=tf.ones_like(dis_fake_logits) * 0.9)
    gen_cost = tf.reduce_mean(gen_loss, name='generator_cost')
    
    
    #########################################
    # Generator and Discriminator Optimizers
    #########################################
      
    dis_optimizer = tf.train.AdamOptimizer(learning_rate)
    dis_train_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='discriminator')
    dis_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, scope='discriminator')
    
    with tf.control_dependencies(dis_update_ops): # required to upd. batch_norm params
        dis_train = dis_optimizer.minimize(dis_cost, var_list=dis_train_vars,
                                           name='train_discriminator')
    
    gen_optimizer = tf.train.AdamOptimizer(learning_rate)
    gen_train_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='generator')
    gen_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, scope='generator')
    
    with tf.control_dependencies(gen_update_ops): # required to upd. batch_norm params
        gen_train = gen_optimizer.minimize(gen_cost, var_list=gen_train_vars,
                                           name='train_generator')
    
    # Saver to save session for reuse
    saver = tf.train.Saver()
WARNING:tensorflow:From <ipython-input-3-05c8e8f3b1eb>:17: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.
Instructions for updating:
Please use alternatives such as official/mnist/dataset.py from tensorflow/models.
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.
Instructions for updating:
Please write your own downloading logic.
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.
Instructions for updating:
Please use tf.data to implement this functionality.
Extracting MNIST_data/train-images-idx3-ubyte.gz
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.
Instructions for updating:
Please use tf.data to implement this functionality.
Extracting MNIST_data/train-labels-idx1-ubyte.gz
Extracting MNIST_data/t10k-images-idx3-ubyte.gz
Extracting MNIST_data/t10k-labels-idx1-ubyte.gz
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.
Instructions for updating:
Please use alternatives such as official/mnist/dataset.py from tensorflow/models.
WARNING:tensorflow:From <ipython-input-3-05c8e8f3b1eb>:64: dense (from tensorflow.python.layers.core) is deprecated and will be removed in a future version.
Instructions for updating:
Use keras.layers.dense instead.
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.
WARNING:tensorflow:From <ipython-input-3-05c8e8f3b1eb>:65: batch_normalization (from tensorflow.python.layers.normalization) is deprecated and will be removed in a future version.
Instructions for updating:
Use keras.layers.batch_normalization instead.
WARNING:tensorflow:From <ipython-input-3-05c8e8f3b1eb>:74: conv2d_transpose (from tensorflow.python.layers.convolutional) is deprecated and will be removed in a future version.
Instructions for updating:
Use keras.layers.conv2d_transpose instead.
WARNING:tensorflow:From <ipython-input-3-05c8e8f3b1eb>:77: dropout (from tensorflow.python.layers.core) is deprecated and will be removed in a future version.
Instructions for updating:
Use keras.layers.dropout instead.
WARNING:tensorflow:From <ipython-input-3-05c8e8f3b1eb>:121: conv2d (from tensorflow.python.layers.convolutional) is deprecated and will be removed in a future version.
Instructions for updating:
Use keras.layers.conv2d instead.
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.cast instead.
In [4]:
##########################
### TRAINING & EVALUATION
##########################

with tf.Session(graph=g) as sess:
    sess.run(tf.global_variables_initializer())
    
    avg_costs = {'discriminator': [], 'generator': []}

    for epoch in range(training_epochs):
        dis_avg_cost, gen_avg_cost = 0., 0.
        total_batch = mnist.train.num_examples // batch_size

        for i in range(total_batch):
            
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            batch_x = batch_x*2 - 1 # rescale from [0, 1] to [-1, 1] to match the generator's tanh output
            batch_randsample = np.random.uniform(-1, 1, size=(batch_size, gen_input_size))
            
            # Train
            
            _, dc = sess.run(['train_discriminator', 'discriminator_cost:0'],
                             feed_dict={'discriminator_inputs:0': batch_x, 
                                        'generator_inputs:0': batch_randsample,
                                        'dropout:0': dropout_rate,
                                        'is_training:0': True})
            
            _, gc = sess.run(['train_generator', 'generator_cost:0'],
                             feed_dict={'generator_inputs:0': batch_randsample,
                                        'dropout:0': dropout_rate,
                                        'is_training:0': True})
            
            dis_avg_cost += dc
            gen_avg_cost += gc

            if not i % print_interval:
                print("Minibatch: %04d | Dis/Gen Cost:    %.3f/%.3f" % (i + 1, dc, gc))
                

        print("Epoch:     %04d | Dis/Gen AvgCost: %.3f/%.3f" % 
              (epoch + 1, dis_avg_cost / total_batch, gen_avg_cost / total_batch))
        
        avg_costs['discriminator'].append(dis_avg_cost / total_batch)
        avg_costs['generator'].append(gen_avg_cost / total_batch)
    
    
    saver.save(sess, save_path='./gan-conv.ckpt')
Minibatch: 0001 | Dis/Gen Cost:    1.290/1.057
Minibatch: 0201 | Dis/Gen Cost:    1.054/1.374
Minibatch: 0401 | Dis/Gen Cost:    0.993/1.822
Minibatch: 0601 | Dis/Gen Cost:    0.580/2.214
Minibatch: 0801 | Dis/Gen Cost:    0.708/1.710
Epoch:     0001 | Dis/Gen AvgCost: 0.821/1.891
Minibatch: 0001 | Dis/Gen Cost:    1.013/2.102
Minibatch: 0201 | Dis/Gen Cost:    1.113/1.212
Minibatch: 0401 | Dis/Gen Cost:    1.247/1.045
Minibatch: 0601 | Dis/Gen Cost:    0.935/1.376
Minibatch: 0801 | Dis/Gen Cost:    0.685/2.113
Epoch:     0002 | Dis/Gen AvgCost: 0.884/1.699
Minibatch: 0001 | Dis/Gen Cost:    1.089/1.191
Minibatch: 0201 | Dis/Gen Cost:    0.971/1.760
Minibatch: 0401 | Dis/Gen Cost:    0.769/2.239
Minibatch: 0601 | Dis/Gen Cost:    1.275/1.442
Minibatch: 0801 | Dis/Gen Cost:    1.023/1.846
Epoch:     0003 | Dis/Gen AvgCost: 0.996/1.580
Minibatch: 0001 | Dis/Gen Cost:    1.088/1.628
Minibatch: 0201 | Dis/Gen Cost:    1.095/1.451
Minibatch: 0401 | Dis/Gen Cost:    0.891/1.560
Minibatch: 0601 | Dis/Gen Cost:    0.974/1.353
Minibatch: 0801 | Dis/Gen Cost:    1.140/1.344
Epoch:     0004 | Dis/Gen AvgCost: 1.119/1.341
Minibatch: 0001 | Dis/Gen Cost:    1.197/1.256
Minibatch: 0201 | Dis/Gen Cost:    1.281/1.192
Minibatch: 0401 | Dis/Gen Cost:    1.159/1.402
Minibatch: 0601 | Dis/Gen Cost:    1.397/0.997
Minibatch: 0801 | Dis/Gen Cost:    1.230/1.087
Epoch:     0005 | Dis/Gen AvgCost: 1.177/1.229
Minibatch: 0001 | Dis/Gen Cost:    1.104/1.103
Minibatch: 0201 | Dis/Gen Cost:    1.385/1.217
Minibatch: 0401 | Dis/Gen Cost:    1.069/1.247
Minibatch: 0601 | Dis/Gen Cost:    1.126/1.309
Minibatch: 0801 | Dis/Gen Cost:    1.202/1.529
Epoch:     0006 | Dis/Gen AvgCost: 1.257/1.143
Minibatch: 0001 | Dis/Gen Cost:    1.274/1.314
Minibatch: 0201 | Dis/Gen Cost:    1.362/0.915
Minibatch: 0401 | Dis/Gen Cost:    1.395/1.082
Minibatch: 0601 | Dis/Gen Cost:    1.270/0.947
Minibatch: 0801 | Dis/Gen Cost:    1.327/1.151
Epoch:     0007 | Dis/Gen AvgCost: 1.324/1.042
Minibatch: 0001 | Dis/Gen Cost:    1.417/0.794
Minibatch: 0201 | Dis/Gen Cost:    1.210/0.995
Minibatch: 0401 | Dis/Gen Cost:    1.558/0.925
Minibatch: 0601 | Dis/Gen Cost:    1.191/1.106
Minibatch: 0801 | Dis/Gen Cost:    1.150/1.047
Epoch:     0008 | Dis/Gen AvgCost: 1.306/1.026
Minibatch: 0001 | Dis/Gen Cost:    1.186/0.991
Minibatch: 0201 | Dis/Gen Cost:    1.332/1.005
Minibatch: 0401 | Dis/Gen Cost:    1.185/1.090
Minibatch: 0601 | Dis/Gen Cost:    1.314/1.000
Minibatch: 0801 | Dis/Gen Cost:    1.115/1.158
Epoch:     0009 | Dis/Gen AvgCost: 1.305/1.006
Minibatch: 0001 | Dis/Gen Cost:    1.348/0.868
Minibatch: 0201 | Dis/Gen Cost:    1.367/0.863
Minibatch: 0401 | Dis/Gen Cost:    1.328/1.020
Minibatch: 0601 | Dis/Gen Cost:    1.395/0.962
Minibatch: 0801 | Dis/Gen Cost:    1.390/0.979
Epoch:     0010 | Dis/Gen AvgCost: 1.300/1.025
Minibatch: 0001 | Dis/Gen Cost:    1.403/1.199
Minibatch: 0201 | Dis/Gen Cost:    1.222/0.985
Minibatch: 0401 | Dis/Gen Cost:    1.212/1.235
Minibatch: 0601 | Dis/Gen Cost:    1.052/1.168
Minibatch: 0801 | Dis/Gen Cost:    1.268/0.917
Epoch:     0011 | Dis/Gen AvgCost: 1.305/1.002
Minibatch: 0001 | Dis/Gen Cost:    1.304/0.949
Minibatch: 0201 | Dis/Gen Cost:    1.198/1.137
Minibatch: 0401 | Dis/Gen Cost:    1.237/1.077
Minibatch: 0601 | Dis/Gen Cost:    1.337/0.930
Minibatch: 0801 | Dis/Gen Cost:    1.341/0.909
Epoch:     0012 | Dis/Gen AvgCost: 1.315/0.986
Minibatch: 0001 | Dis/Gen Cost:    1.411/0.964
Minibatch: 0201 | Dis/Gen Cost:    1.335/0.955
Minibatch: 0401 | Dis/Gen Cost:    1.319/0.927
Minibatch: 0601 | Dis/Gen Cost:    1.257/0.952
Minibatch: 0801 | Dis/Gen Cost:    1.283/0.973
Epoch:     0013 | Dis/Gen AvgCost: 1.329/0.974
Minibatch: 0001 | Dis/Gen Cost:    1.266/1.170
Minibatch: 0201 | Dis/Gen Cost:    1.478/0.830
Minibatch: 0401 | Dis/Gen Cost:    1.300/0.954
Minibatch: 0601 | Dis/Gen Cost:    1.305/0.980
Minibatch: 0801 | Dis/Gen Cost:    1.435/0.809
Epoch:     0014 | Dis/Gen AvgCost: 1.325/0.946
Minibatch: 0001 | Dis/Gen Cost:    1.305/0.940
Minibatch: 0201 | Dis/Gen Cost:    1.473/0.910
Minibatch: 0401 | Dis/Gen Cost:    1.408/0.976
Minibatch: 0601 | Dis/Gen Cost:    1.312/0.944
Minibatch: 0801 | Dis/Gen Cost:    1.412/0.905
Epoch:     0015 | Dis/Gen AvgCost: 1.338/0.949
Minibatch: 0001 | Dis/Gen Cost:    1.297/0.971
Minibatch: 0201 | Dis/Gen Cost:    1.196/1.051
Minibatch: 0401 | Dis/Gen Cost:    1.262/0.956
Minibatch: 0601 | Dis/Gen Cost:    1.248/0.974
Minibatch: 0801 | Dis/Gen Cost:    1.278/0.954
Epoch:     0016 | Dis/Gen AvgCost: 1.331/0.947
Minibatch: 0001 | Dis/Gen Cost:    1.227/0.928
Minibatch: 0201 | Dis/Gen Cost:    1.304/0.998
Minibatch: 0401 | Dis/Gen Cost:    1.195/0.963
Minibatch: 0601 | Dis/Gen Cost:    1.230/0.910
Minibatch: 0801 | Dis/Gen Cost:    1.281/1.064
Epoch:     0017 | Dis/Gen AvgCost: 1.335/0.914
Minibatch: 0001 | Dis/Gen Cost:    1.423/0.921
Minibatch: 0201 | Dis/Gen Cost:    1.309/0.892
Minibatch: 0401 | Dis/Gen Cost:    1.311/0.895
Minibatch: 0601 | Dis/Gen Cost:    1.378/0.842
Minibatch: 0801 | Dis/Gen Cost:    1.388/0.833
Epoch:     0018 | Dis/Gen AvgCost: 1.344/0.902
Minibatch: 0001 | Dis/Gen Cost:    1.177/1.030
Minibatch: 0201 | Dis/Gen Cost:    1.255/1.045
Minibatch: 0401 | Dis/Gen Cost:    1.359/0.986
Minibatch: 0601 | Dis/Gen Cost:    1.273/0.944
Minibatch: 0801 | Dis/Gen Cost:    1.297/0.914
Epoch:     0019 | Dis/Gen AvgCost: 1.333/0.928
Minibatch: 0001 | Dis/Gen Cost:    1.403/0.921
Minibatch: 0201 | Dis/Gen Cost:    1.272/0.932
Minibatch: 0401 | Dis/Gen Cost:    1.250/0.931
Minibatch: 0601 | Dis/Gen Cost:    1.298/0.904
Minibatch: 0801 | Dis/Gen Cost:    1.290/0.852
Epoch:     0020 | Dis/Gen AvgCost: 1.332/0.916
Minibatch: 0001 | Dis/Gen Cost:    1.384/0.898
Minibatch: 0201 | Dis/Gen Cost:    1.386/0.886
Minibatch: 0401 | Dis/Gen Cost:    1.314/1.025
Minibatch: 0601 | Dis/Gen Cost:    1.546/0.881
Minibatch: 0801 | Dis/Gen Cost:    1.202/1.017
Epoch:     0021 | Dis/Gen AvgCost: 1.330/0.930
Minibatch: 0001 | Dis/Gen Cost:    1.232/1.135
Minibatch: 0201 | Dis/Gen Cost:    1.317/0.930
Minibatch: 0401 | Dis/Gen Cost:    1.194/1.068
Minibatch: 0601 | Dis/Gen Cost:    1.378/0.859
Minibatch: 0801 | Dis/Gen Cost:    1.267/0.955
Epoch:     0022 | Dis/Gen AvgCost: 1.339/0.907
Minibatch: 0001 | Dis/Gen Cost:    1.294/0.937
Minibatch: 0201 | Dis/Gen Cost:    1.347/0.860
Minibatch: 0401 | Dis/Gen Cost:    1.362/0.878
Minibatch: 0601 | Dis/Gen Cost:    1.228/0.866
Minibatch: 0801 | Dis/Gen Cost:    1.344/0.900
Epoch:     0023 | Dis/Gen AvgCost: 1.339/0.895
Minibatch: 0001 | Dis/Gen Cost:    1.454/0.811
Minibatch: 0201 | Dis/Gen Cost:    1.448/0.924
Minibatch: 0401 | Dis/Gen Cost:    1.300/0.950
Minibatch: 0601 | Dis/Gen Cost:    1.326/0.881
Minibatch: 0801 | Dis/Gen Cost:    1.283/1.006
Epoch:     0024 | Dis/Gen AvgCost: 1.340/0.889
Minibatch: 0001 | Dis/Gen Cost:    1.348/0.922
Minibatch: 0201 | Dis/Gen Cost:    1.430/0.758
Minibatch: 0401 | Dis/Gen Cost:    1.369/0.870
Minibatch: 0601 | Dis/Gen Cost:    1.343/0.838
Minibatch: 0801 | Dis/Gen Cost:    1.189/0.967
Epoch:     0025 | Dis/Gen AvgCost: 1.347/0.891
Minibatch: 0001 | Dis/Gen Cost:    1.395/0.865
Minibatch: 0201 | Dis/Gen Cost:    1.495/0.803
Minibatch: 0401 | Dis/Gen Cost:    1.450/0.861
Minibatch: 0601 | Dis/Gen Cost:    1.299/0.953
Minibatch: 0801 | Dis/Gen Cost:    1.426/0.793
Epoch:     0026 | Dis/Gen AvgCost: 1.339/0.891
Minibatch: 0001 | Dis/Gen Cost:    1.348/0.856
Minibatch: 0201 | Dis/Gen Cost:    1.303/0.942
Minibatch: 0401 | Dis/Gen Cost:    1.344/0.846
Minibatch: 0601 | Dis/Gen Cost:    1.276/0.888
Minibatch: 0801 | Dis/Gen Cost:    1.393/0.855
Epoch:     0027 | Dis/Gen AvgCost: 1.347/0.881
Minibatch: 0001 | Dis/Gen Cost:    1.305/0.963
Minibatch: 0201 | Dis/Gen Cost:    1.391/0.850
Minibatch: 0401 | Dis/Gen Cost:    1.380/0.795
Minibatch: 0601 | Dis/Gen Cost:    1.295/0.840
Minibatch: 0801 | Dis/Gen Cost:    1.194/0.927
Epoch:     0028 | Dis/Gen AvgCost: 1.350/0.867
Minibatch: 0001 | Dis/Gen Cost:    1.394/0.805
Minibatch: 0201 | Dis/Gen Cost:    1.288/0.889
Minibatch: 0401 | Dis/Gen Cost:    1.331/0.922
Minibatch: 0601 | Dis/Gen Cost:    1.466/0.795
Minibatch: 0801 | Dis/Gen Cost:    1.430/0.779
Epoch:     0029 | Dis/Gen AvgCost: 1.341/0.873
Minibatch: 0001 | Dis/Gen Cost:    1.297/0.879
Minibatch: 0201 | Dis/Gen Cost:    1.268/0.932
Minibatch: 0401 | Dis/Gen Cost:    1.432/0.831
Minibatch: 0601 | Dis/Gen Cost:    1.335/0.845
Minibatch: 0801 | Dis/Gen Cost:    1.401/0.962
Epoch:     0030 | Dis/Gen AvgCost: 1.337/0.872
Minibatch: 0001 | Dis/Gen Cost:    1.300/0.910
Minibatch: 0201 | Dis/Gen Cost:    1.369/0.872
Minibatch: 0401 | Dis/Gen Cost:    1.421/0.826
Minibatch: 0601 | Dis/Gen Cost:    1.351/0.946
Minibatch: 0801 | Dis/Gen Cost:    1.401/0.864
Epoch:     0031 | Dis/Gen AvgCost: 1.344/0.863
Minibatch: 0001 | Dis/Gen Cost:    1.273/0.875
Minibatch: 0201 | Dis/Gen Cost:    1.353/0.836
Minibatch: 0401 | Dis/Gen Cost:    1.372/0.867
Minibatch: 0601 | Dis/Gen Cost:    1.368/0.853
Minibatch: 0801 | Dis/Gen Cost:    1.186/0.904
Epoch:     0032 | Dis/Gen AvgCost: 1.342/0.868
Minibatch: 0001 | Dis/Gen Cost:    1.405/0.823
Minibatch: 0201 | Dis/Gen Cost:    1.321/0.931
Minibatch: 0401 | Dis/Gen Cost:    1.361/0.858
Minibatch: 0601 | Dis/Gen Cost:    1.274/0.891
Minibatch: 0801 | Dis/Gen Cost:    1.397/0.848
Epoch:     0033 | Dis/Gen AvgCost: 1.345/0.858
Minibatch: 0001 | Dis/Gen Cost:    1.174/0.992
Minibatch: 0201 | Dis/Gen Cost:    1.278/0.902
Minibatch: 0401 | Dis/Gen Cost:    1.341/0.900
Minibatch: 0601 | Dis/Gen Cost:    1.267/0.906
Minibatch: 0801 | Dis/Gen Cost:    1.369/0.820
Epoch:     0034 | Dis/Gen AvgCost: 1.346/0.862
Minibatch: 0001 | Dis/Gen Cost:    1.305/0.838
Minibatch: 0201 | Dis/Gen Cost:    1.403/0.846
Minibatch: 0401 | Dis/Gen Cost:    1.338/0.850
Minibatch: 0601 | Dis/Gen Cost:    1.343/0.833
Minibatch: 0801 | Dis/Gen Cost:    1.334/0.797
Epoch:     0035 | Dis/Gen AvgCost: 1.353/0.850
Minibatch: 0001 | Dis/Gen Cost:    1.394/0.846
Minibatch: 0201 | Dis/Gen Cost:    1.407/0.841
Minibatch: 0401 | Dis/Gen Cost:    1.481/0.732
Minibatch: 0601 | Dis/Gen Cost:    1.328/0.884
Minibatch: 0801 | Dis/Gen Cost:    1.414/0.789
Epoch:     0036 | Dis/Gen AvgCost: 1.352/0.850
Minibatch: 0001 | Dis/Gen Cost:    1.310/0.838
Minibatch: 0201 | Dis/Gen Cost:    1.376/0.805
Minibatch: 0401 | Dis/Gen Cost:    1.341/0.864
Minibatch: 0601 | Dis/Gen Cost:    1.328/0.896
Minibatch: 0801 | Dis/Gen Cost:    1.383/0.791
Epoch:     0037 | Dis/Gen AvgCost: 1.352/0.840
Minibatch: 0001 | Dis/Gen Cost:    1.295/0.861
Minibatch: 0201 | Dis/Gen Cost:    1.455/0.826
Minibatch: 0401 | Dis/Gen Cost:    1.420/0.796
Minibatch: 0601 | Dis/Gen Cost:    1.337/0.871
Minibatch: 0801 | Dis/Gen Cost:    1.328/0.863
Epoch:     0038 | Dis/Gen AvgCost: 1.348/0.852
Minibatch: 0001 | Dis/Gen Cost:    1.382/0.824
Minibatch: 0201 | Dis/Gen Cost:    1.302/0.897
Minibatch: 0401 | Dis/Gen Cost:    1.385/0.792
Minibatch: 0601 | Dis/Gen Cost:    1.314/0.847
Minibatch: 0801 | Dis/Gen Cost:    1.423/0.779
Epoch:     0039 | Dis/Gen AvgCost: 1.350/0.848
Minibatch: 0001 | Dis/Gen Cost:    1.419/0.852
Minibatch: 0201 | Dis/Gen Cost:    1.390/0.885
Minibatch: 0401 | Dis/Gen Cost:    1.348/0.802
Minibatch: 0601 | Dis/Gen Cost:    1.349/0.833
Minibatch: 0801 | Dis/Gen Cost:    1.382/0.775
Epoch:     0040 | Dis/Gen AvgCost: 1.349/0.842
Minibatch: 0001 | Dis/Gen Cost:    1.289/0.918
Minibatch: 0201 | Dis/Gen Cost:    1.410/0.772
Minibatch: 0401 | Dis/Gen Cost:    1.393/0.790
Minibatch: 0601 | Dis/Gen Cost:    1.317/0.829
Minibatch: 0801 | Dis/Gen Cost:    1.267/0.878
Epoch:     0041 | Dis/Gen AvgCost: 1.358/0.837
Minibatch: 0001 | Dis/Gen Cost:    1.342/0.859
Minibatch: 0201 | Dis/Gen Cost:    1.340/0.870
Minibatch: 0401 | Dis/Gen Cost:    1.394/0.803
Minibatch: 0601 | Dis/Gen Cost:    1.355/0.820
Minibatch: 0801 | Dis/Gen Cost:    1.359/0.836
Epoch:     0042 | Dis/Gen AvgCost: 1.348/0.847
Minibatch: 0001 | Dis/Gen Cost:    1.330/0.807
Minibatch: 0201 | Dis/Gen Cost:    1.386/0.836
Minibatch: 0401 | Dis/Gen Cost:    1.400/0.816
Minibatch: 0601 | Dis/Gen Cost:    1.355/0.855
Minibatch: 0801 | Dis/Gen Cost:    1.315/0.919
Epoch:     0043 | Dis/Gen AvgCost: 1.354/0.845
Minibatch: 0001 | Dis/Gen Cost:    1.338/0.838
Minibatch: 0201 | Dis/Gen Cost:    1.317/0.866
Minibatch: 0401 | Dis/Gen Cost:    1.341/0.819
Minibatch: 0601 | Dis/Gen Cost:    1.260/0.863
Minibatch: 0801 | Dis/Gen Cost:    1.285/0.917
Epoch:     0044 | Dis/Gen AvgCost: 1.351/0.850
Minibatch: 0001 | Dis/Gen Cost:    1.378/0.826
Minibatch: 0201 | Dis/Gen Cost:    1.332/0.881
Minibatch: 0401 | Dis/Gen Cost:    1.247/0.920
Minibatch: 0601 | Dis/Gen Cost:    1.339/0.807
Minibatch: 0801 | Dis/Gen Cost:    1.350/0.850
Epoch:     0045 | Dis/Gen AvgCost: 1.356/0.836
Minibatch: 0001 | Dis/Gen Cost:    1.341/0.872
Minibatch: 0201 | Dis/Gen Cost:    1.406/0.818
Minibatch: 0401 | Dis/Gen Cost:    1.478/0.765
Minibatch: 0601 | Dis/Gen Cost:    1.426/0.837
Minibatch: 0801 | Dis/Gen Cost:    1.271/0.824
Epoch:     0046 | Dis/Gen AvgCost: 1.356/0.832
Minibatch: 0001 | Dis/Gen Cost:    1.388/0.812
Minibatch: 0201 | Dis/Gen Cost:    1.279/0.916
Minibatch: 0401 | Dis/Gen Cost:    1.331/0.805
Minibatch: 0601 | Dis/Gen Cost:    1.321/0.861
Minibatch: 0801 | Dis/Gen Cost:    1.344/0.860
Epoch:     0047 | Dis/Gen AvgCost: 1.351/0.843
Minibatch: 0001 | Dis/Gen Cost:    1.342/0.807
Minibatch: 0201 | Dis/Gen Cost:    1.356/0.813
Minibatch: 0401 | Dis/Gen Cost:    1.361/0.806
Minibatch: 0601 | Dis/Gen Cost:    1.393/0.811
Minibatch: 0801 | Dis/Gen Cost:    1.379/0.783
Epoch:     0048 | Dis/Gen AvgCost: 1.357/0.824
Minibatch: 0001 | Dis/Gen Cost:    1.368/0.793
Minibatch: 0201 | Dis/Gen Cost:    1.364/0.812
Minibatch: 0401 | Dis/Gen Cost:    1.339/0.843
Minibatch: 0601 | Dis/Gen Cost:    1.331/0.798
Minibatch: 0801 | Dis/Gen Cost:    1.358/0.815
Epoch:     0049 | Dis/Gen AvgCost: 1.359/0.823
Minibatch: 0001 | Dis/Gen Cost:    1.367/0.819
Minibatch: 0201 | Dis/Gen Cost:    1.300/0.845
Minibatch: 0401 | Dis/Gen Cost:    1.364/0.808
Minibatch: 0601 | Dis/Gen Cost:    1.284/0.912
Minibatch: 0801 | Dis/Gen Cost:    1.334/0.837
Epoch:     0050 | Dis/Gen AvgCost: 1.355/0.833
In [5]:
%matplotlib inline
import matplotlib.pyplot as plt

plt.plot(range(len(avg_costs['discriminator'])), 
         avg_costs['discriminator'], label='discriminator')
plt.plot(range(len(avg_costs['generator'])),
         avg_costs['generator'], label='generator')
plt.legend()
plt.show()
In [6]:
####################################
### RELOAD & GENERATE SAMPLE IMAGES
####################################


n_examples = 25

with tf.Session(graph=g) as sess:
    saver.restore(sess, save_path='./gan-conv.ckpt')

    batch_randsample = np.random.uniform(-1, 1, size=(n_examples, gen_input_size))
    new_examples = sess.run('generator/generator_outputs:0',
                            feed_dict={'generator_inputs:0': batch_randsample,
                                       'dropout:0': 0.0,
                                       'is_training:0': False})

fig, axes = plt.subplots(nrows=5, ncols=5, figsize=(8, 8),
                         sharey=True, sharex=True)

for image, ax in zip(new_examples, axes.flatten()):
    ax.imshow(image.reshape((dis_input_size // 28, dis_input_size // 28)), cmap='binary')

plt.show()
WARNING:tensorflow:From /home/raschka/miniconda3/lib/python3.7/site-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.
Instructions for updating:
Use standard file APIs to check for files with this prefix.
INFO:tensorflow:Restoring parameters from ./gan-conv.ckpt