Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.

In [1]:
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka 

CPython 3.7.3
IPython 7.6.1

torch 1.2.0
  • Runs on CPU or GPU (if available)

Deep Convolutional Wasserstein GAN

Implementation of a deep convolutional Wasserstein GAN based on the paper Wasserstein GAN by Arjovsky, Chintala, and Bottou (2017), https://arxiv.org/abs/1701.07875.

The main differences compared to a conventional deep convolutional GAN are annotated in the code. In short, they are the following (a small numeric example follows the list):

  1. Not using a sigmoid activation function and just using a linear output layer for the critic (i.e., discriminator).
  2. Using label -1 instead of 1 for the real images; using label 1 instead of 0 for fake images.
  3. Using Wasserstein distance (loss) for training both the critic and the generator.
  4. After each weight update of the critic, clip its weights to the range [-0.01, 0.01] (weight_clip_value below), which keeps the critic's weights bounded and crudely enforces the Lipschitz constraint required by the Wasserstein formulation.
  5. Train the critic 5 times for each generator training update.
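
To make the sign conventions in points 2 and 3 concrete, here is a tiny numeric sketch (not part of the original notebook) that reuses the same wasserstein_loss function defined in the Model section below:

import torch

def wasserstein_loss(y_true, y_pred):
    return torch.mean(y_true * y_pred)

# Real images carry label -1 and fake images label +1, so minimizing the
# combined loss pushes the critic's scores up on real images and down on
# generated ones.
real_scores = torch.tensor([0.8, 0.9])   # critic outputs for real images
fake_scores = torch.tensor([0.2, 0.1])   # critic outputs for generated images
real_loss = wasserstein_loss(-torch.ones(2), real_scores)   # -> -0.85
fake_loss = wasserstein_loss(torch.ones(2), fake_scores)    # ->  0.15
critic_loss = 0.5 * (real_loss + fake_loss)                 # -> -0.35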

Imports

In [2]:
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader


if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True

Settings and Dataset

In [3]:
##########################
### SETTINGS
##########################

# Device
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 0
generator_learning_rate = 0.00005
discriminator_learning_rate = 0.00005
NUM_EPOCHS = 100
BATCH_SIZE = 128
LATENT_DIM = 100
IMG_SHAPE = (1, 28, 28)
IMG_SIZE = 1
for x in IMG_SHAPE:
    IMG_SIZE *= x

## WGAN-specific settings
num_iter_critic = 5
weight_clip_value = 0.01


##########################
### MNIST DATASET
##########################

# Note transforms.ToTensor() scales input images
# to 0-1 range
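# (these [0, 1] images are rescaled to the [-1, 1] range inside the
# training loop via (features - 0.5)*2., matching the Tanh output
# of the generator)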
train_dataset = datasets.MNIST(root='data', 
                               train=True, 
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data', 
                              train=False, 
                              transform=transforms.ToTensor())


train_loader = DataLoader(dataset=train_dataset, 
                          batch_size=BATCH_SIZE,
                          num_workers=4,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset, 
                         batch_size=BATCH_SIZE,
                         num_workers=4,
                         shuffle=False)

# Checking the dataset
for images, labels in train_loader:  
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Image batch dimensions: torch.Size([128, 1, 28, 28])
Image label dimensions: torch.Size([128])

Model

In [4]:
##########################
### MODEL
##########################

class Flatten(nn.Module):
    def forward(self, input):
        return input.view(input.size(0), -1)
    
class Reshape1(nn.Module):
    def forward(self, input):
        return input.view(input.size(0), 64, 7, 7)


def wasserstein_loss(y_true, y_pred):
    return torch.mean(y_true * y_pred)
    
    
class GAN(torch.nn.Module):

    def __init__(self):
        super(GAN, self).__init__()
        
        
        self.generator = nn.Sequential(
              
            nn.Linear(LATENT_DIM, 3136, bias=False),
            nn.BatchNorm1d(num_features=3136),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            Reshape1(),
            
            nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            nn.ConvTranspose2d(in_channels=32, out_channels=16, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=16),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            nn.ConvTranspose2d(in_channels=16, out_channels=8, kernel_size=(3, 3), stride=(1, 1), padding=0, bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            nn.ConvTranspose2d(in_channels=8, out_channels=1, kernel_size=(2, 2), stride=(1, 1), padding=0, bias=False),
            nn.Tanh()
        )
        
        self.discriminator = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=8, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            nn.Conv2d(in_channels=8, out_channels=16, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=16),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            nn.Conv2d(in_channels=16, out_channels=32, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            Flatten(),

            nn.Linear(512, 1),
            #nn.Sigmoid()
        )

            
    def generator_forward(self, z):
        img = self.generator(z)
        return img
    
    def discriminator_forward(self, img):
        pred = self.discriminator(img)
        return pred.view(-1)
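
As a quick sanity check, the following small sketch (not part of the original notebook; sanity_model is just a throwaway instance) verifies that the generator maps a latent batch to 1 x 28 x 28 images, matching IMG_SHAPE:

# Forward-pass shape check for the generator (sketch only).
sanity_model = GAN()
with torch.no_grad():
    z = torch.zeros((4, LATENT_DIM)).uniform_(0.0, 1.0)
    fake_imgs = sanity_model.generator_forward(z)
print(fake_imgs.shape)  # expected: torch.Size([4, 1, 28, 28])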
In [5]:
torch.manual_seed(random_seed)

#del model
model = GAN()
model = model.to(device)

print(model)
GAN(
  (generator): Sequential(
    (0): Linear(in_features=100, out_features=3136, bias=False)
    (1): BatchNorm1d(3136, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): LeakyReLU(negative_slope=0.0001, inplace=True)
    (3): Reshape1()
    (4): ConvTranspose2d(64, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (5): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (6): LeakyReLU(negative_slope=0.0001, inplace=True)
    (7): ConvTranspose2d(32, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (8): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (9): LeakyReLU(negative_slope=0.0001, inplace=True)
    (10): ConvTranspose2d(16, 8, kernel_size=(3, 3), stride=(1, 1), bias=False)
    (11): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (12): LeakyReLU(negative_slope=0.0001, inplace=True)
    (13): ConvTranspose2d(8, 1, kernel_size=(2, 2), stride=(1, 1), bias=False)
    (14): Tanh()
  )
  (discriminator): Sequential(
    (0): Conv2d(1, 8, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (1): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): LeakyReLU(negative_slope=0.0001, inplace=True)
    (3): Conv2d(8, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (5): LeakyReLU(negative_slope=0.0001, inplace=True)
    (6): Conv2d(16, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (7): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (8): LeakyReLU(negative_slope=0.0001, inplace=True)
    (9): Flatten()
    (10): Linear(in_features=512, out_features=1, bias=True)
  )
)
In [6]:
### FOR DEBUGGING

"""
outputs = []
def hook(module, input, output):
    outputs.append(output)

for i, layer in enumerate(model.discriminator):
    if isinstance(layer, torch.nn.modules.conv.Conv2d):
        model.discriminator[i].register_forward_hook(hook)

#for i, layer in enumerate(model.generator):
#    if isinstance(layer, torch.nn.modules.ConvTranspose2d):
#        model.generator[i].register_forward_hook(hook)
"""
Out[6]:
'\noutputs = []\ndef hook(module, input, output):\n    outputs.append(output)\n\nfor i, layer in enumerate(model.discriminator):\n    if isinstance(layer, torch.nn.modules.conv.Conv2d):\n        model.discriminator[i].register_forward_hook(hook)\n\n#for i, layer in enumerate(model.generator):\n#    if isinstance(layer, torch.nn.modules.ConvTranspose2d):\n#        model.generator[i].register_forward_hook(hook)\n'
In [7]:
optim_gener = torch.optim.RMSprop(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.RMSprop(model.discriminator.parameters(), lr=discriminator_learning_rate)

Training

In [8]:
start_time = time.time()    

discr_costs = []
gener_costs = []
for epoch in range(NUM_EPOCHS):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        
        # Normalize images to [-1, 1] range
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device) 

        targets = targets.to(device)

        # Regular GAN:
        # valid = torch.ones(targets.size(0)).float().to(device)
        # fake = torch.zeros(targets.size(0)).float().to(device)
        
        # WGAN:
        valid = -(torch.ones(targets.size(0)).float()).to(device)
        fake = torch.ones(targets.size(0)).float().to(device)
        

        ### FORWARD AND BACK PROP
        
        
        # --------------------------
        # Train Generator
        # --------------------------
        
        # Make new images
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(0.0, 1.0).to(device)
        generated_features = model.generator_forward(z)
        
        # Loss for fooling the discriminator
        discr_pred = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28))
        
        # Regular GAN:
        # gener_loss = F.binary_cross_entropy_with_logits(discr_pred, valid)
        
        # WGAN:
        gener_loss = wasserstein_loss(valid, discr_pred)
        
        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()
        
        
        # --------------------------
        # Train Discriminator
        # --------------------------        

        # WGAN: Multiple loops for the discriminator
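        # (the same batch of generated_features produced in the generator
        #  step above is reused, detached, for all num_iter_critic updates)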
        for _ in range(num_iter_critic):
        
            discr_pred_real = model.discriminator_forward(features.view(targets.size(0), 1, 28, 28))
            # Regular GAN:
            # real_loss = F.binary_cross_entropy_with_logits(discr_pred_real, valid)
            # WGAN:
            real_loss = wasserstein_loss(valid, discr_pred_real)

            discr_pred_fake = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28).detach())

            # Regular GAN:
            # fake_loss = F.binary_cross_entropy_with_logits(discr_pred_fake, fake)
            # WGAN:
            fake_loss = wasserstein_loss(fake, discr_pred_fake)

            discr_loss = 0.5*(real_loss + fake_loss)
            
            optim_discr.zero_grad()
            discr_loss.backward()
            optim_discr.step()        

            # WGAN:
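            # (clipping every critic weight to [-weight_clip_value, weight_clip_value]
            #  keeps the critic's weights in a bounded range, the WGAN paper's
            #  crude way of approximately enforcing the Lipschitz constraint)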
            for p in model.discriminator.parameters():
                p.data.clamp_(-weight_clip_value, weight_clip_value)

        
        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())
        
        
        ### LOGGING
        if not batch_idx % 100:
            print ('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f' 
                   %(epoch+1, NUM_EPOCHS, batch_idx, 
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))
    
print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.3318/-0.0001
Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 0.0037/-0.0026
Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 0.0121/-0.0126
Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 0.0117/-0.0123
Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 0.0110/-0.0124
Time elapsed: 0.31 min
Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 0.0123/-0.0140
Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 0.0124/-0.0136
Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 0.0108/-0.0126
Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 0.0089/-0.0104
Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 0.0093/-0.0108
Time elapsed: 0.64 min
Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 0.0095/-0.0107
Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 0.0094/-0.0097
Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 0.0089/-0.0099
Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 0.0084/-0.0087
Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 0.0083/-0.0081
Time elapsed: 1.12 min
Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 0.0071/-0.0080
Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 0.0077/-0.0076
Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 0.0090/-0.0070
Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 0.0079/-0.0082
Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 0.0101/-0.0072
Time elapsed: 1.65 min
Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 0.0098/-0.0080
Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 0.0089/-0.0078
Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 0.0087/-0.0075
Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 0.0079/-0.0073
Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 0.0058/-0.0078
Time elapsed: 2.15 min
Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 0.0048/-0.0071
Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 0.0050/-0.0070
Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 0.0046/-0.0069
Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 0.0060/-0.0069
Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 0.0067/-0.0067
Time elapsed: 2.67 min
Epoch: 007/100 | Batch 000/469 | Gen/Dis Loss: 0.0066/-0.0075
Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 0.0074/-0.0067
Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 0.0053/-0.0028
Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 0.0029/-0.0043
Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 0.0018/-0.0043
Time elapsed: 3.20 min
Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 0.0025/-0.0040
Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 0.0015/-0.0034
Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: -0.0001/-0.0023
Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 0.0014/-0.0017
Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: -0.0003/-0.0022
Time elapsed: 3.74 min
Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 0.0006/-0.0021
Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 0.0017/-0.0022
Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 0.0014/-0.0016
Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: -0.0005/-0.0015
Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: -0.0032/-0.0012
Time elapsed: 4.25 min
Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: -0.0036/-0.0015
Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: -0.0000/-0.0015
Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: -0.0024/-0.0009
Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: -0.0010/-0.0012
Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 0.0012/-0.0015
Time elapsed: 4.76 min
Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 0.0013/-0.0010
Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 0.0003/-0.0011
Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: -0.0005/-0.0013
Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 0.0000/-0.0014
Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: -0.0002/-0.0014
Time elapsed: 5.26 min
Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: -0.0000/-0.0012
Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 0.0009/-0.0010
Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: -0.0001/-0.0011
Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: -0.0016/-0.0010
Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: -0.0021/-0.0010
Time elapsed: 5.79 min
Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: -0.0032/-0.0009
Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: -0.0023/-0.0009
Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: -0.0038/-0.0013
Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 0.0004/-0.0014
Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: -0.0002/-0.0012
Time elapsed: 6.30 min
Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: -0.0007/-0.0011
Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: -0.0009/-0.0012
Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: -0.0007/-0.0010
Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: -0.0002/-0.0009
Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: -0.0009/-0.0008
Time elapsed: 6.82 min
Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: -0.0006/-0.0008
Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: -0.0014/-0.0009
Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: -0.0029/-0.0008
Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: -0.0030/-0.0008
Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: -0.0022/-0.0009
Time elapsed: 7.32 min
Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: -0.0015/-0.0010
Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: -0.0013/-0.0008
Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: -0.0011/-0.0008
Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: -0.0008/-0.0007
Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: -0.0023/-0.0008
Time elapsed: 7.84 min
Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: -0.0017/-0.0009
Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: -0.0017/-0.0008
Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: -0.0038/-0.0009
Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: -0.0036/-0.0009
Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: -0.0029/-0.0007
Time elapsed: 8.39 min
Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: -0.0024/-0.0009
Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: -0.0029/-0.0008
Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: -0.0029/-0.0007
Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: -0.0014/-0.0007
Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: -0.0017/-0.0008
Time elapsed: 8.91 min
Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: -0.0038/-0.0009
Epoch: 019/100 | Batch 100/469 | Gen/Dis Loss: -0.0054/-0.0009
Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: -0.0035/-0.0010
Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: -0.0027/-0.0008
Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: -0.0005/-0.0008
Time elapsed: 9.44 min
Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: -0.0005/-0.0006
Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: -0.0010/-0.0005
Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: -0.0012/-0.0006
Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: -0.0038/-0.0007
Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: -0.0041/-0.0008
Time elapsed: 9.97 min
Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: -0.0043/-0.0008
Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: -0.0029/-0.0008
Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: -0.0021/-0.0007
Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: -0.0023/-0.0007
Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: -0.0018/-0.0006
Time elapsed: 10.47 min
Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: -0.0014/-0.0006
Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: -0.0033/-0.0007
Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: -0.0007/-0.0005
Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 0.0003/-0.0007
Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: -0.0019/-0.0006
Time elapsed: 10.99 min
Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: -0.0046/-0.0006
Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: -0.0029/-0.0006
Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: -0.0027/-0.0005
Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: -0.0024/-0.0004
Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: -0.0037/-0.0005
Time elapsed: 11.48 min
Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: -0.0032/-0.0005
Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: -0.0027/-0.0006
Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: -0.0013/-0.0006
Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: -0.0010/-0.0006
Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: -0.0025/-0.0007
Time elapsed: 11.83 min
Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: -0.0036/-0.0006
Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: -0.0038/-0.0005
Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: -0.0030/-0.0006
Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: -0.0029/-0.0008
Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: -0.0022/-0.0005
Time elapsed: 12.14 min
Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: -0.0010/-0.0005
Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: -0.0030/-0.0005
Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: -0.0002/-0.0005
Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: -0.0004/-0.0004
Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 0.0006/-0.0005
Time elapsed: 12.45 min
Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: -0.0004/-0.0004
Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: -0.0005/-0.0005
Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: -0.0029/-0.0006
Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: -0.0031/-0.0005
Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: -0.0033/-0.0006
Time elapsed: 12.76 min
Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 0.0026/-0.0005
Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 0.0000/-0.0006
Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 0.0007/-0.0002
Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: -0.0001/-0.0004
Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 0.0024/-0.0005
Time elapsed: 13.06 min
Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 0.0015/-0.0005
Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 0.0006/-0.0004
Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 0.0006/-0.0003
Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: -0.0056/-0.0002
Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 0.0086/-0.0007
Time elapsed: 13.36 min
Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 0.0015/-0.0006
Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: -0.0056/-0.0008
Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 0.0057/-0.0007
Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: -0.0112/-0.0001
Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 0.0086/-0.0005
Time elapsed: 13.67 min
Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 0.0026/-0.0005
Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 0.0044/-0.0002
Epoch: 031/100 | Batch 200/469 | Gen/Dis Loss: 0.0021/-0.0003
Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 0.0005/-0.0004
Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 0.0001/-0.0005
Time elapsed: 13.98 min
Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 0.0011/-0.0005
Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 0.0046/-0.0008
Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 0.0025/-0.0007
Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 0.0029/-0.0005
Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 0.0069/-0.0007
Time elapsed: 14.29 min
Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 0.0048/-0.0006
Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 0.0011/-0.0005
Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 0.0008/-0.0003
Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 0.0039/-0.0006
Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 0.0039/-0.0004
Time elapsed: 14.60 min
Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 0.0010/-0.0005
Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 0.0001/-0.0004
Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 0.0026/-0.0004
Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 0.0008/-0.0003
Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 0.0043/-0.0005
Time elapsed: 14.90 min
Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 0.0033/-0.0004
Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 0.0017/-0.0002
Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 0.0012/-0.0004
Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 0.0013/-0.0004
Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 0.0018/-0.0003
Time elapsed: 15.20 min
Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 0.0046/-0.0004
Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 0.0046/-0.0004
Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 0.0022/-0.0004
Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 0.0007/-0.0002
Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 0.0027/-0.0003
Time elapsed: 15.51 min
Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 0.0006/-0.0004
Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 0.0016/-0.0004
Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: -0.0014/-0.0003
Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 0.0015/-0.0004
Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 0.0015/-0.0002
Time elapsed: 15.82 min
Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 0.0013/-0.0003
Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 0.0011/-0.0002
Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 0.0023/-0.0003
Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 0.0008/-0.0003
Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 0.0031/-0.0003
Time elapsed: 16.28 min
Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 0.0041/-0.0002
Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 0.0047/-0.0002
Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 0.0040/-0.0002
Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 0.0051/-0.0003
Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 0.0094/-0.0003
Time elapsed: 16.76 min
Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 0.0061/-0.0002
Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 0.0054/-0.0003
Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 0.0064/-0.0002
Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 0.0075/-0.0001
Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 0.0066/-0.0002
Time elapsed: 17.29 min
Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 0.0054/-0.0002
Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 0.0021/-0.0002
Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 0.0018/-0.0002
Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: -0.0017/-0.0001
Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 0.0028/-0.0002
Time elapsed: 17.79 min
Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 0.0041/-0.0002
Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 0.0044/-0.0003
Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: -0.0007/-0.0002
Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: -0.0091/0.0000
Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: -0.0043/0.0001
Time elapsed: 18.34 min
Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 0.0023/-0.0008
Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: -0.0025/-0.0006
Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: -0.0066/-0.0006
Epoch: 043/100 | Batch 300/469 | Gen/Dis Loss: -0.0074/0.0004
Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: -0.0021/-0.0018
Time elapsed: 18.86 min
Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: -0.0033/0.0001
Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 0.0019/0.0003
Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 0.0014/-0.0021
Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: -0.0003/-0.0016
Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: -0.0072/-0.0009
Time elapsed: 19.35 min
Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: -0.0013/-0.0012
Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: -0.0002/-0.0002
Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 0.0008/0.0005
Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 0.0056/-0.0006
Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: -0.0134/0.0001
Time elapsed: 19.88 min
Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: -0.0147/0.0003
Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 0.0120/0.0002
Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: -0.0061/-0.0006
Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 0.0007/-0.0012
Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: -0.0118/0.0007
Time elapsed: 20.40 min
Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 0.0015/-0.0018
Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: -0.0118/-0.0000
Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 0.0048/0.0009
Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: -0.0124/-0.0005
Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: -0.0039/0.0002
Time elapsed: 20.91 min
Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 0.0008/-0.0021
Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: -0.0005/-0.0018
Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 0.0010/-0.0005
Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 0.0115/0.0001
Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 0.0111/-0.0002
Time elapsed: 21.40 min
Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: -0.0005/-0.0015
Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 0.0011/0.0006
Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: -0.0071/-0.0001
Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: -0.0178/0.0002
Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 0.0072/-0.0016
Time elapsed: 21.93 min
Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: -0.0129/0.0002
Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 0.0003/-0.0013
Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: -0.0002/-0.0005
Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: -0.0052/-0.0002
Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: -0.0026/0.0008
Time elapsed: 22.46 min
Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: -0.0113/0.0000
Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: -0.0013/-0.0012
Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 0.0119/0.0004
Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: -0.0066/0.0003
Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: -0.0078/-0.0013
Time elapsed: 22.98 min
Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 0.0033/-0.0019
Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: -0.0053/-0.0001
Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 0.0040/0.0007
Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 0.0033/-0.0021
Time elapsed: 23.50 min
Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: -0.0034/-0.0008
Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 0.0042/-0.0001
Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 0.0124/-0.0001
Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: -0.0017/0.0004
Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: -0.0066/0.0002
Time elapsed: 24.02 min
Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 0.0108/-0.0000
Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: -0.0019/-0.0019
Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 0.0058/-0.0010
Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 0.0080/-0.0002
Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: -0.0115/-0.0003
Time elapsed: 24.55 min
Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 0.0126/-0.0001
Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 0.0151/-0.0007
Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: -0.0005/0.0007
Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 0.0079/-0.0014
Epoch: 055/100 | Batch 400/469 | Gen/Dis Loss: -0.0089/-0.0005
Time elapsed: 25.07 min
Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: -0.0097/0.0002
Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: -0.0038/0.0010
Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: -0.0095/0.0006
Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: -0.0044/-0.0008
Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: -0.0044/-0.0016
Time elapsed: 25.58 min
Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: -0.0152/-0.0004
Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 0.0012/-0.0002
Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: -0.0033/-0.0004
Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 0.0100/-0.0000
Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: -0.0003/-0.0003
Time elapsed: 26.10 min
Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: -0.0068/-0.0003
Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: -0.0049/0.0001
Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 0.0008/0.0003
Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: -0.0002/-0.0016
Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: -0.0016/-0.0004
Time elapsed: 26.57 min
Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: -0.0093/-0.0001
Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 0.0033/-0.0002
Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 0.0009/-0.0004
Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: -0.0142/-0.0001
Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: -0.0129/0.0001
Time elapsed: 26.97 min
Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: -0.0021/-0.0009
Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 0.0020/-0.0002
Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: -0.0099/-0.0010
Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: -0.0112/0.0001
Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: -0.0024/-0.0004
Time elapsed: 27.28 min
Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: -0.0044/-0.0012
Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: -0.0034/-0.0005
Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: -0.0031/-0.0009
Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: -0.0058/-0.0000
Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: -0.0034/0.0003
Time elapsed: 27.59 min
Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: -0.0017/-0.0014
Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: -0.0043/0.0000
Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: -0.0055/0.0001
Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 0.0066/-0.0009
Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: -0.0013/0.0007
Time elapsed: 27.89 min
Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 0.0031/-0.0013
Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: -0.0093/0.0000
Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: -0.0041/-0.0003
Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 0.0093/-0.0003
Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 0.0140/0.0003
Time elapsed: 28.20 min
Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 0.0119/0.0003
Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 0.0124/-0.0009
Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: -0.0076/0.0001
Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: -0.0027/-0.0002
Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 0.0005/0.0002
Time elapsed: 28.51 min
Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: -0.0002/0.0005
Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 0.0005/-0.0006
Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 0.0089/0.0001
Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: -0.0060/-0.0007
Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 0.0010/-0.0006
Time elapsed: 28.82 min
Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 0.0078/0.0003
Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 0.0001/0.0003
Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: -0.0047/-0.0001
Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 0.0067/-0.0005
Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 0.0030/-0.0004
Time elapsed: 29.13 min
Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 0.0048/-0.0002
Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 0.0046/-0.0004
Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: -0.0027/-0.0002
Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 0.0040/-0.0005
Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 0.0062/-0.0004
Time elapsed: 29.44 min
Epoch: 068/100 | Batch 000/469 | Gen/Dis Loss: 0.0033/0.0000
Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 0.0000/-0.0007
Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: -0.0038/0.0001
Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: -0.0047/-0.0000
Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 0.0131/-0.0001
Time elapsed: 29.74 min
Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 0.0051/-0.0004
Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 0.0027/-0.0002
Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: -0.0019/-0.0005
Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 0.0072/-0.0000
Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: -0.0068/0.0002
Time elapsed: 30.05 min
Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 0.0098/-0.0007
Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: -0.0070/-0.0001
Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 0.0102/-0.0005
Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: -0.0098/0.0000
Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: -0.0121/-0.0001
Time elapsed: 30.37 min
Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 0.0081/0.0000
Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 0.0028/-0.0002
Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: -0.0082/-0.0002
Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 0.0113/-0.0003
Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 0.0028/-0.0009
Time elapsed: 30.68 min
Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 0.0087/-0.0003
Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: -0.0092/0.0004
Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 0.0016/-0.0002
Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 0.0059/-0.0011
Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: -0.0058/-0.0004
Time elapsed: 30.99 min
Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: -0.0079/-0.0006
Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 0.0076/-0.0001
Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: -0.0003/-0.0004
Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 0.0090/-0.0000
Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 0.0064/-0.0003
Time elapsed: 31.29 min
Epoch: 074/100 | Batch 000/469 | Gen/Dis Loss: -0.0062/0.0002
Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 0.0074/0.0004
Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 0.0034/-0.0004
Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: -0.0032/0.0000
Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 0.0045/-0.0016
Time elapsed: 31.61 min
Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 0.0067/-0.0018
Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: -0.0029/-0.0007
Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: -0.0014/-0.0001
Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: -0.0001/-0.0013
Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: -0.0023/-0.0006
Time elapsed: 31.91 min
Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 0.0036/-0.0008
Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: -0.0003/-0.0001
Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 0.0024/-0.0001
Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 0.0006/-0.0003
Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: -0.0000/0.0000
Time elapsed: 32.23 min
Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: -0.0022/0.0005
Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 0.0091/-0.0000
Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 0.0090/-0.0004
Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: -0.0045/-0.0001
Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 0.0035/0.0006
Time elapsed: 32.53 min
Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 0.0089/0.0001
Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 0.0075/-0.0003
Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: -0.0023/-0.0014
Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 0.0030/-0.0012
Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: -0.0115/0.0000
Time elapsed: 32.84 min
Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: -0.0055/0.0006
Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: -0.0082/-0.0001
Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: -0.0013/-0.0006
Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: -0.0147/0.0006
Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 0.0019/-0.0005
Time elapsed: 33.15 min
Epoch: 080/100 | Batch 000/469 | Gen/Dis Loss: -0.0017/-0.0001
Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: -0.0035/-0.0014
Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: -0.0055/0.0005
Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 0.0093/0.0001
Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 0.0036/-0.0003
Time elapsed: 33.45 min
Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: -0.0003/-0.0008
Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: -0.0013/-0.0002
Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: -0.0011/0.0001
Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 0.0014/-0.0009
Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: -0.0065/0.0005
Time elapsed: 33.76 min
Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 0.0072/-0.0007
Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 0.0079/-0.0005
Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: -0.0043/-0.0005
Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: -0.0119/0.0002
Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: -0.0008/-0.0007
Time elapsed: 34.06 min
Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: -0.0010/-0.0015
Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 0.0126/-0.0000
Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: -0.0006/-0.0008
Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 0.0055/-0.0005
Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 0.0085/-0.0000
Time elapsed: 34.37 min
Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: -0.0085/-0.0003
Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: -0.0008/-0.0001
Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 0.0046/-0.0001
Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: -0.0052/-0.0002
Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: -0.0037/-0.0002
Time elapsed: 34.67 min
Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: -0.0008/-0.0006
Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: -0.0061/-0.0001
Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: -0.0102/0.0001
Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 0.0008/-0.0008
Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: -0.0019/-0.0004
Time elapsed: 34.99 min
Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: -0.0029/0.0001
Epoch: 086/100 | Batch 100/469 | Gen/Dis Loss: 0.0046/-0.0001
Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: -0.0042/-0.0005
Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: -0.0082/-0.0002
Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: -0.0093/-0.0004
Time elapsed: 35.29 min
Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: -0.0035/0.0002
Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: -0.0071/0.0000
Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 0.0018/0.0002
Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: -0.0019/-0.0004
Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 0.0075/-0.0002
Time elapsed: 35.60 min
Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 0.0017/-0.0003
Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 0.0024/-0.0005
Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: -0.0023/-0.0003
Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 0.0001/-0.0005
Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: -0.0027/-0.0003
Time elapsed: 35.91 min
Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: -0.0039/-0.0007
Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: -0.0031/-0.0003
Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 0.0024/-0.0003
Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: -0.0041/-0.0001
Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 0.0014/-0.0005
Time elapsed: 36.22 min
Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: -0.0012/-0.0004
Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: -0.0022/-0.0004
Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: -0.0083/-0.0005
Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: -0.0047/-0.0004
Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 0.0001/-0.0003
Time elapsed: 36.52 min
Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: -0.0013/-0.0003
Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: -0.0040/-0.0005
Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: -0.0029/-0.0003
Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: -0.0026/-0.0003
Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: -0.0001/-0.0002
Time elapsed: 36.84 min
Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 0.0051/-0.0004
Epoch: 092/100 | Batch 100/469 | Gen/Dis Loss: -0.0005/-0.0003
Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 0.0041/-0.0004
Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 0.0020/-0.0004
Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 0.0004/-0.0003
Time elapsed: 37.15 min
Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: -0.0005/-0.0003
Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 0.0008/-0.0004
Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: -0.0013/-0.0004
Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: -0.0007/-0.0004
Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: -0.0013/-0.0002
Time elapsed: 37.45 min
Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: -0.0017/-0.0003
Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: -0.0018/-0.0003
Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: -0.0018/-0.0003
Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: -0.0017/-0.0003
Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: -0.0019/-0.0003
Time elapsed: 37.75 min
Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: -0.0026/-0.0003
Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: -0.0022/-0.0003
Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: -0.0014/-0.0003
Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: -0.0005/-0.0002
Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: -0.0008/-0.0002
Time elapsed: 38.06 min
Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: -0.0002/-0.0003
Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 0.0011/-0.0003
Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 0.0006/-0.0003
Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 0.0019/-0.0004
Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 0.0012/-0.0003
Time elapsed: 38.37 min
Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: -0.0001/-0.0003
Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: -0.0007/-0.0004
Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: -0.0015/-0.0003
Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: -0.0034/-0.0003
Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: -0.0007/-0.0004
Time elapsed: 38.67 min
Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: -0.0004/-0.0003
Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: -0.0015/-0.0004
Epoch: 098/100 | Batch 200/469 | Gen/Dis Loss: -0.0017/-0.0002
Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: -0.0014/-0.0004
Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: -0.0037/-0.0004
Time elapsed: 38.97 min
Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: -0.0055/-0.0003
Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: -0.0039/-0.0003
Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: -0.0037/-0.0003
Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: -0.0041/-0.0003
Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: -0.0044/-0.0003
Time elapsed: 39.27 min
Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: -0.0041/-0.0002
Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: -0.0024/-0.0003
Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: -0.0021/-0.0002
Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: -0.0033/-0.0002
Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: -0.0035/-0.0002
Time elapsed: 39.57 min
Total Training Time: 39.57 min
In [9]:
### For Debugging

"""
for i in outputs:
    print(i.size())
"""
Out[9]:
'\nfor i in outputs:\n    print(i.size())\n'

Evaluation

In [10]:
%matplotlib inline
import matplotlib.pyplot as plt
In [11]:
ax1 = plt.subplot(1, 1, 1)
ax1.plot(range(len(gener_costs)), gener_costs, label='Generator loss')
ax1.plot(range(len(discr_costs)), discr_costs, label='Discriminator loss')
ax1.set_xlabel('Iterations')
ax1.set_ylabel('Loss')
ax1.legend()

###################
# Set second x-axis
ax2 = ax1.twiny()
newlabel = list(range(NUM_EPOCHS+1))
iter_per_epoch = len(train_loader)
newpos = [e*iter_per_epoch for e in newlabel]

ax2.set_xticklabels(newlabel[::10])
ax2.set_xticks(newpos[::10])

ax2.xaxis.set_ticks_position('bottom')
ax2.xaxis.set_label_position('bottom')
ax2.spines['bottom'].set_position(('outward', 45))
ax2.set_xlabel('Epochs')
ax2.set_xlim(ax1.get_xlim())
###################

plt.show()
In [15]:
##########################
### VISUALIZATION
##########################


model.eval()
# Make new images
z = torch.zeros((10, LATENT_DIM)).uniform_(0.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=10, figsize=(20, 2.5))


for i, ax in enumerate(axes):
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
In [16]:
from torchsummary import summary
model = model.to('cuda:0')
summary(model.generator, input_size=(100,))
summary(model.discriminator, input_size=(1, 28, 28))
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Linear-1                 [-1, 3136]         313,600
       BatchNorm1d-2                 [-1, 3136]           6,272
         LeakyReLU-3                 [-1, 3136]               0
          Reshape1-4             [-1, 64, 7, 7]               0
   ConvTranspose2d-5           [-1, 32, 13, 13]          18,432
       BatchNorm2d-6           [-1, 32, 13, 13]              64
         LeakyReLU-7           [-1, 32, 13, 13]               0
   ConvTranspose2d-8           [-1, 16, 25, 25]           4,608
       BatchNorm2d-9           [-1, 16, 25, 25]              32
        LeakyReLU-10           [-1, 16, 25, 25]               0
  ConvTranspose2d-11            [-1, 8, 27, 27]           1,152
      BatchNorm2d-12            [-1, 8, 27, 27]              16
        LeakyReLU-13            [-1, 8, 27, 27]               0
  ConvTranspose2d-14            [-1, 1, 28, 28]              32
             Tanh-15            [-1, 1, 28, 28]               0
================================================================
Total params: 344,208
Trainable params: 344,208
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.59
Params size (MB): 1.31
Estimated Total Size (MB): 1.91
----------------------------------------------------------------
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1            [-1, 8, 14, 14]              72
       BatchNorm2d-2            [-1, 8, 14, 14]              16
         LeakyReLU-3            [-1, 8, 14, 14]               0
            Conv2d-4             [-1, 16, 7, 7]           1,152
       BatchNorm2d-5             [-1, 16, 7, 7]              32
         LeakyReLU-6             [-1, 16, 7, 7]               0
            Conv2d-7             [-1, 32, 4, 4]           4,608
       BatchNorm2d-8             [-1, 32, 4, 4]              64
         LeakyReLU-9             [-1, 32, 4, 4]               0
          Flatten-10                  [-1, 512]               0
           Linear-11                    [-1, 1]             513
================================================================
Total params: 6,457
Trainable params: 6,457
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.07
Params size (MB): 0.02
Estimated Total Size (MB): 0.10
----------------------------------------------------------------