Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.

In [1]:
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka 

CPython 3.7.3
IPython 7.6.1

torch 1.2.0
  • Runs on CPU or GPU (if available)

Convolutional GAN with Label Smoothing

Same as ./gan-conv.ipynb but with one-sided label smoothing.

Here, the label smoothing approach is to replace the real-image labels (1's) with 0.9, based on the idea proposed in

  • Salimans, Tim, Ian Goodfellow, Wojciech Zaremba, Vicki Cheung, Alec Radford, and Xi Chen. "Improved techniques for training GANs." In Advances in Neural Information Processing Systems, pp. 2234-2242. 2016.
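
Concretely, the smoothed label simply replaces the usual all-ones target in the binary cross-entropy. A minimal sketch (the logits here are made up; only the 0.9 smoothing factor comes from this notebook):

import torch
import torch.nn.functional as F

# raw (pre-sigmoid) discriminator outputs for a batch of three real images
logits = torch.tensor([2.0, -1.0, 0.5])

hard_targets = torch.ones(3)         # conventional real labels
smooth_targets = hard_targets*0.9    # one-sided label smoothing

# BCE-with-logits against the smoothed targets,
# exactly as in the training loop below
loss = F.binary_cross_entropy_with_logits(logits, smooth_targets)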

Imports

In [2]:
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader


if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True  # make cuDNN ops reproducible

Settings and Dataset

In [3]:
##########################
### SETTINGS
##########################

# Device
device = torch.device("cuda:3" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 1
generator_learning_rate = 0.0001
discriminator_learning_rate = 0.0001
NUM_EPOCHS = 100
BATCH_SIZE = 128
LATENT_DIM = 100
IMG_SHAPE = (1, 28, 28)
IMG_SIZE = int(np.prod(IMG_SHAPE))  # 1*28*28 = 784



##########################
### MNIST DATASET
##########################

# Note that transforms.ToTensor() scales input images
# to the [0, 1] range
train_dataset = datasets.MNIST(root='data', 
                               train=True, 
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data', 
                              train=False, 
                              transform=transforms.ToTensor())


train_loader = DataLoader(dataset=train_dataset, 
                          batch_size=BATCH_SIZE,
                          num_workers=4,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset, 
                         batch_size=BATCH_SIZE,
                         num_workers=4,
                         shuffle=False)

# Checking the dataset
for images, labels in train_loader:  
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Image batch dimensions: torch.Size([128, 1, 28, 28])
Image label dimensions: torch.Size([128])
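
Since the generator below ends in a Tanh (outputs in [-1, 1]), the training loop rescales the real images from ToTensor()'s [0, 1] range to [-1, 1]. A minimal sketch of that mapping (the random batch is just a stand-in):

x = torch.rand(4, 1, 28, 28)   # stand-in for a ToTensor() batch in [0, 1]
x = (x - 0.5)*2.               # same rescaling as in the training loop
assert x.min() >= -1.0 and x.max() <= 1.0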

Model

In [4]:
##########################
### MODEL
##########################

class Flatten(nn.Module):
    def forward(self, input):
        return input.view(input.size(0), -1)
    
class Reshape1(nn.Module):
    def forward(self, input):
        # hard-coded reshape to 64 x 7 x 7 feature maps for the generator
        return input.view(input.size(0), 64, 7, 7)


class GAN(torch.nn.Module):

    def __init__(self):
        super(GAN, self).__init__()
        
        
        self.generator = nn.Sequential(
              
            # latent vector -> 3136 -> reshaped to 64 x 7 x 7
            nn.Linear(LATENT_DIM, 3136, bias=False),
            nn.BatchNorm1d(num_features=3136),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            Reshape1(),
            
            # 64 x 7 x 7 -> 32 x 13 x 13
            nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            # 32 x 13 x 13 -> 16 x 25 x 25
            nn.ConvTranspose2d(in_channels=32, out_channels=16, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=16),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            # 16 x 25 x 25 -> 8 x 27 x 27
            nn.ConvTranspose2d(in_channels=16, out_channels=8, kernel_size=(3, 3), stride=(1, 1), padding=0, bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            # 8 x 27 x 27 -> 1 x 28 x 28, squashed to [-1, 1]
            nn.ConvTranspose2d(in_channels=8, out_channels=1, kernel_size=(2, 2), stride=(1, 1), padding=0, bias=False),
            nn.Tanh()
        )
        
        self.discriminator = nn.Sequential(
            # 1 x 28 x 28 -> 8 x 14 x 14
            nn.Conv2d(in_channels=1, out_channels=8, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            # 8 x 14 x 14 -> 32 x 7 x 7
            nn.Conv2d(in_channels=8, out_channels=32, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            Flatten(),

            # 32*7*7 = 1568 features -> 1 logit
            nn.Linear(7*7*32, 1),
            # No Sigmoid here: the training loop uses
            # binary_cross_entropy_with_logits, which applies it internally
            #nn.Sigmoid()
        )

            
    def generator_forward(self, z):
        img = self.generator(z)
        return img
    
    def discriminator_forward(self, img):
        pred = self.discriminator(img)
        return pred.view(-1)
In [5]:
torch.manual_seed(random_seed)

#del model
model = GAN()
model = model.to(device)

print(model)
GAN(
  (generator): Sequential(
    (0): Linear(in_features=100, out_features=3136, bias=False)
    (1): BatchNorm1d(3136, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): LeakyReLU(negative_slope=0.0001, inplace=True)
    (3): Reshape1()
    (4): ConvTranspose2d(64, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (5): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (6): LeakyReLU(negative_slope=0.0001, inplace=True)
    (7): ConvTranspose2d(32, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (8): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (9): LeakyReLU(negative_slope=0.0001, inplace=True)
    (10): ConvTranspose2d(16, 8, kernel_size=(3, 3), stride=(1, 1), bias=False)
    (11): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (12): LeakyReLU(negative_slope=0.0001, inplace=True)
    (13): ConvTranspose2d(8, 1, kernel_size=(2, 2), stride=(1, 1), bias=False)
    (14): Tanh()
  )
  (discriminator): Sequential(
    (0): Conv2d(1, 8, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (1): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): LeakyReLU(negative_slope=0.0001, inplace=True)
    (3): Conv2d(8, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (5): LeakyReLU(negative_slope=0.0001, inplace=True)
    (6): Flatten()
    (7): Linear(in_features=1568, out_features=1, bias=True)
  )
)
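
A minimal shape check for the two subnetworks (a sketch; batch size 2 is arbitrary but must be > 1 because of the batch-norm layers in train mode):

z_check = torch.zeros(2, LATENT_DIM).uniform_(-1., 1.).to(device)
img_check = model.generator_forward(z_check)
print(img_check.size())                               # torch.Size([2, 1, 28, 28])
print(model.discriminator_forward(img_check).size())  # torch.Size([2])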
In [6]:
### FOR DEBUGGING

"""
outputs = []
def hook(module, input, output):
    outputs.append(output)

#for i, layer in enumerate(model.discriminator):
#    if isinstance(layer, torch.nn.modules.conv.Conv2d):
#        model.discriminator[i].register_forward_hook(hook)

for i, layer in enumerate(model.generator):
    if isinstance(layer, torch.nn.modules.ConvTranspose2d):
        model.generator[i].register_forward_hook(hook)
"""
In [7]:
# Separate optimizers so that each update step only touches
# one subnetwork's parameters
optim_gener = torch.optim.Adam(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.Adam(model.discriminator.parameters(), lr=discriminator_learning_rate)

Training

In [8]:
start_time = time.time()    

discr_costs = []
gener_costs = []
for epoch in range(NUM_EPOCHS):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        
        # Normalize images to [-1, 1] range
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device) 

        targets = targets.to(device)

        # Fixed label templates; the real label (1.0) is smoothed
        # to 0.9 when the losses are computed below
        valid = torch.ones(targets.size(0)).float().to(device)
        fake = torch.zeros(targets.size(0)).float().to(device)
        

        ### FORWARD AND BACK PROP
        
        
        # --------------------------
        # Train Generator
        # --------------------------
        
        # Make new images
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
        generated_features = model.generator_forward(z)
        
        # Loss for fooling the discriminator: push its prediction for
        # the generated images towards the (smoothed) real label
        discr_pred = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28))
        
        gener_loss = F.binary_cross_entropy_with_logits(discr_pred, valid*0.9)
        
        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()
        
        # --------------------------
        # Train Discriminator
        # --------------------------        
        
        # Real images should be predicted as real (smoothed label 0.9)
        discr_pred_real = model.discriminator_forward(features.view(targets.size(0), 1, 28, 28))
        real_loss = F.binary_cross_entropy_with_logits(discr_pred_real, valid*0.9)
        
        # .detach() keeps the discriminator update from backpropagating
        # into the generator
        discr_pred_fake = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28).detach())
        fake_loss = F.binary_cross_entropy_with_logits(discr_pred_fake, fake)
        
        discr_loss = 0.5*(real_loss + fake_loss)

        optim_discr.zero_grad()
        discr_loss.backward()
        optim_discr.step()        
        
        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())
        
        
        ### LOGGING
        if not batch_idx % 100:
            print('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f' 
                  % (epoch+1, NUM_EPOCHS, batch_idx, 
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))
    
print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.5320/0.7922
Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 1.4870/0.3290
Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 1.6836/0.2914
Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 1.6206/0.3252
Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 1.3477/0.3873
Time elapsed: 0.10 min
Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 1.1881/0.4570
Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 1.0543/0.5261
Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 1.0355/0.5480
Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 1.0210/0.5513
Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 1.0446/0.5411
Time elapsed: 0.20 min
Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 0.9950/0.5798
Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 1.0264/0.5392
Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 1.0358/0.5289
Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 1.0154/0.5382
Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 1.0329/0.5386
Time elapsed: 0.30 min
Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 0.9942/0.5350
Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 0.9935/0.5596
Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 1.0723/0.5326
Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 0.9673/0.5343
Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 0.9634/0.5457
Time elapsed: 0.46 min
Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 0.9763/0.5381
Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 1.0243/0.5313
Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 1.1074/0.4962
Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 1.0888/0.5450
Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 1.0268/0.5335
Time elapsed: 0.67 min
Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 1.0481/0.5334
Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 1.0659/0.5359
Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 1.0606/0.5353
Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 1.0148/0.5581
Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 1.0480/0.5270
Time elapsed: 0.87 min
Epoch: 007/100 | Batch 000/469 | Gen/Dis Loss: 1.1187/0.5311
Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 1.0766/0.5436
Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 1.0922/0.5150
Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 1.0813/0.5352
Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 1.0612/0.5482
Time elapsed: 1.06 min
Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 1.1072/0.5301
Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 1.0544/0.5223
Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: 1.1581/0.4927
Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 1.0709/0.5446
Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: 1.0505/0.5157
Time elapsed: 1.25 min
Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 1.0234/0.5477
Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 1.0853/0.5574
Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 1.0813/0.5295
Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: 1.1477/0.5230
Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: 1.0494/0.5382
Time elapsed: 1.46 min
Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: 1.0925/0.5289
Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: 1.1235/0.5244
Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: 0.9995/0.5589
Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: 1.1845/0.5210
Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 1.0900/0.5516
Time elapsed: 1.65 min
Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 1.0603/0.5348
Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 1.0729/0.5681
Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: 1.0606/0.5342
Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 1.1485/0.5209
Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: 1.0348/0.5702
Time elapsed: 1.84 min
Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: 1.0810/0.5220
Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 1.1326/0.5712
Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: 1.0990/0.5533
Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: 1.0621/0.5568
Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: 1.1725/0.5415
Time elapsed: 2.03 min
Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: 1.1411/0.5407
Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: 1.0399/0.5808
Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: 1.0423/0.5356
Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 1.1102/0.5274
Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: 1.0668/0.5330
Time elapsed: 2.22 min
Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: 1.0714/0.5416
Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: 1.0760/0.5599
Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: 1.1349/0.5670
Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: 1.1845/0.5673
Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: 1.1341/0.5306
Time elapsed: 2.41 min
Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: 1.0499/0.6021
Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: 1.1554/0.5127
Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: 1.0708/0.5380
Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: 1.0891/0.5775
Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: 1.0872/0.5662
Time elapsed: 2.60 min
Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: 1.0491/0.5729
Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: 1.0154/0.5615
Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: 0.9762/0.5606
Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: 1.1872/0.5477
Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: 1.0967/0.5339
Time elapsed: 2.80 min
Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: 1.1712/0.5244
Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: 1.0325/0.5837
Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: 1.1879/0.5272
Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: 1.1488/0.5301
Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: 1.1142/0.5530
Time elapsed: 2.99 min
Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: 1.1366/0.5606
Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: 1.1510/0.5812
Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: 1.0945/0.5411
Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: 1.0829/0.5528
Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: 1.0869/0.5694
Time elapsed: 3.17 min
Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: 1.1290/0.5084
Epoch: 019/100 | Batch 100/469 | Gen/Dis Loss: 1.0626/0.5566
Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: 1.0842/0.5612
Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: 1.0065/0.5908
Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: 0.9998/0.5711
Time elapsed: 3.37 min
Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: 1.0631/0.5676
Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: 1.0952/0.6067
Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: 1.1171/0.5492
Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: 1.0411/0.5744
Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: 1.0759/0.5379
Time elapsed: 3.59 min
Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: 1.0226/0.5858
Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: 1.1105/0.5660
Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: 1.0240/0.6026
Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: 1.1665/0.5466
Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: 1.1050/0.6089
Time elapsed: 3.78 min
Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: 1.0061/0.5985
Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: 1.1183/0.5675
Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: 1.0606/0.5838
Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 1.1386/0.6011
Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: 1.1208/0.5674
Time elapsed: 3.96 min
Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: 1.0082/0.5942
Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: 0.9857/0.5843
Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: 0.9313/0.6361
Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: 1.0739/0.5524
Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: 1.0840/0.5623
Time elapsed: 4.17 min
Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: 0.9677/0.5934
Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: 1.1458/0.5657
Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: 1.0225/0.5821
Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: 1.0983/0.5959
Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: 1.0693/0.5773
Time elapsed: 4.36 min
Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: 1.0839/0.6029
Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: 1.0004/0.6399
Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: 1.0044/0.5849
Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: 0.9312/0.6572
Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: 0.9938/0.5910
Time elapsed: 4.55 min
Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: 0.9641/0.5836
Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: 1.0659/0.5984
Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: 1.0551/0.5856
Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: 1.0279/0.5927
Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 0.9705/0.6792
Time elapsed: 4.75 min
Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: 0.9604/0.6380
Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: 0.9590/0.6085
Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: 0.9089/0.6264
Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: 0.9990/0.5850
Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: 0.9640/0.6527
Time elapsed: 4.95 min
Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 1.0161/0.6081
Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 1.0219/0.6123
Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 1.0578/0.6017
Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: 1.0244/0.6309
Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 1.0143/0.6120
Time elapsed: 5.14 min
Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 0.9761/0.5912
Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 0.8794/0.6594
Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 0.9777/0.6007
Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: 0.8981/0.6379
Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 0.9711/0.6454
Time elapsed: 5.34 min
Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 0.9695/0.6218
Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: 0.9793/0.6096
Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 1.0565/0.6410
Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: 0.9631/0.6679
Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 0.9781/0.6153
Time elapsed: 5.52 min
Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 0.9347/0.5942
Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 0.9340/0.5984
Epoch: 031/100 | Batch 200/469 | Gen/Dis Loss: 0.9283/0.6416
Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 1.0474/0.6204
Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 0.9788/0.6065
Time elapsed: 5.73 min
Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 0.9352/0.6080
Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 0.8874/0.6449
Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 0.9189/0.6215
Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 0.9705/0.6757
Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 0.9382/0.6260
Time elapsed: 5.93 min
Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 1.0194/0.6070
Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 0.9278/0.5882
Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 0.9658/0.5919
Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 0.9989/0.6067
Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 1.0065/0.6097
Time elapsed: 6.13 min
Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 0.8918/0.6565
Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 0.9476/0.6564
Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 0.9889/0.6139
Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 0.9204/0.6231
Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 0.9453/0.6558
Time elapsed: 6.32 min
Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 0.9531/0.6304
Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 0.9402/0.6524
Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 1.0085/0.6098
Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 0.9529/0.6294
Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 0.9086/0.6743
Time elapsed: 6.51 min
Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 0.9465/0.6449
Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 0.8743/0.6269
Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 0.9682/0.6181
Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 1.0213/0.6172
Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 1.0728/0.5829
Time elapsed: 6.68 min
Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 0.9110/0.6454
Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 0.9418/0.6336
Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: 0.9703/0.6371
Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 1.0006/0.6264
Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 0.9831/0.6291
Time elapsed: 6.87 min
Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 0.9877/0.6091
Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 0.8961/0.6820
Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 0.9768/0.6413
Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 0.9086/0.6131
Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 0.9212/0.6473
Time elapsed: 7.06 min
Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 0.9597/0.6557
Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 1.0108/0.6188
Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 0.9724/0.6409
Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 1.0631/0.6000
Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 1.0164/0.6078
Time elapsed: 7.23 min
Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 0.9027/0.6829
Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 0.9385/0.6331
Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 0.9257/0.6692
Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 1.0278/0.6281
Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 0.8908/0.6434
Time elapsed: 7.41 min
Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 0.8392/0.6464
Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 0.9264/0.6450
Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 0.8712/0.6496
Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: 0.9087/0.6096
Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 0.9793/0.6604
Time elapsed: 7.61 min
Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 0.9750/0.6290
Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 0.9613/0.6369
Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: 0.8647/0.6712
Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: 0.8525/0.6741
Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: 0.8626/0.6453
Time elapsed: 7.80 min
Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 0.8535/0.6526
Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: 0.9141/0.6216
Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: 0.9024/0.6430
Epoch: 043/100 | Batch 300/469 | Gen/Dis Loss: 0.9806/0.6424
Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: 0.8825/0.6430
Time elapsed: 7.98 min
Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: 0.9506/0.6323
Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 0.8813/0.6536
Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 0.9125/0.6537
Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: 0.8490/0.6530
Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: 0.9261/0.6601
Time elapsed: 8.18 min
Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: 0.8833/0.6435
Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: 0.8944/0.6543
Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 0.8968/0.6511
Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 0.8690/0.6522
Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: 0.8871/0.6439
Time elapsed: 8.37 min
Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: 0.8794/0.6417
Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 0.9484/0.6256
Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: 0.8949/0.6643
Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 0.9169/0.6477
Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: 0.9111/0.6740
Time elapsed: 8.56 min
Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 0.9315/0.6471
Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: 0.9397/0.6290
Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 0.9085/0.6481
Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: 0.8705/0.6527
Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: 0.9047/0.6465
Time elapsed: 8.75 min
Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 0.8974/0.6480
Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: 0.8152/0.6822
Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 0.9224/0.6479
Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 0.8793/0.6455
Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 0.8849/0.6653
Time elapsed: 8.95 min
Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: 0.9057/0.6376
Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 0.9147/0.6287
Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: 0.9788/0.6402
Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: 0.8769/0.6447
Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 0.8975/0.6359
Time elapsed: 9.15 min
Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: 0.9455/0.6467
Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 0.8935/0.6917
Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: 0.8943/0.6494
Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: 0.8874/0.6232
Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: 0.8918/0.6746
Time elapsed: 9.33 min
Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: 0.8766/0.6614
Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: 0.8982/0.6424
Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 0.8892/0.6196
Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: 0.8533/0.6741
Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: 0.8637/0.6509
Time elapsed: 9.51 min
Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 0.8511/0.6680
Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: 0.8697/0.6568
Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 0.8437/0.6725
Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 0.8739/0.6908
Epoch: 052/100 | Batch 400/469 | Gen/Dis Loss: 0.9267/0.6464
Time elapsed: 9.71 min
Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: 0.8242/0.6720
Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 0.8904/0.6410
Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 0.8620/0.6560
Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: 0.8165/0.6990
Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: 0.9925/0.6468
Time elapsed: 9.90 min
Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 0.8690/0.6812
Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: 0.8796/0.6673
Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 0.8629/0.6625
Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 0.8851/0.6427
Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: 0.9085/0.6378
Time elapsed: 10.09 min
Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 0.9116/0.6681
Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 0.9058/0.6517
Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: 0.8960/0.6652
Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 0.8504/0.6793
Epoch: 055/100 | Batch 400/469 | Gen/Dis Loss: 0.9018/0.6498
Time elapsed: 10.29 min
Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: 0.8087/0.6975
Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: 0.9034/0.6551
Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: 0.8600/0.6592
Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: 0.8801/0.6692
Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: 0.8501/0.6948
Time elapsed: 10.48 min
Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: 0.8765/0.6890
Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 0.8667/0.6659
Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: 0.8265/0.6724
Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 0.8454/0.6812
Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: 0.8842/0.6953
Time elapsed: 10.69 min
Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: 0.8508/0.6527
Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: 0.8790/0.6718
Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 0.8515/0.6719
Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: 0.8757/0.6311
Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: 0.9075/0.6461
Time elapsed: 10.86 min
Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: 0.8903/0.6573
Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 0.8772/0.6456
Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 0.8487/0.6886
Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: 0.8758/0.6656
Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: 0.7832/0.7048
Time elapsed: 11.07 min
Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: 0.8813/0.6634
Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 0.8211/0.6760
Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: 0.8294/0.6861
Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: 0.8535/0.6794
Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: 0.8986/0.6499
Time elapsed: 11.27 min
Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: 0.8992/0.6577
Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: 0.8802/0.6738
Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: 0.8823/0.6861
Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: 0.8590/0.6601
Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: 0.8702/0.6688
Time elapsed: 11.46 min
Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: 0.8650/0.6517
Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: 0.8796/0.6479
Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: 0.9608/0.6564
Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 0.8365/0.6959
Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: 0.8407/0.6635
Time elapsed: 11.65 min
Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 0.8371/0.6663
Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: 0.8306/0.6620
Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: 0.8949/0.6656
Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 0.8633/0.6690
Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 0.8143/0.6708
Time elapsed: 11.83 min
Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 0.8591/0.6857
Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 0.8329/0.6863
Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: 0.8904/0.6684
Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: 0.8345/0.6737
Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 0.8772/0.6603
Time elapsed: 12.02 min
Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: 0.8387/0.6540
Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 0.8358/0.6991
Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 0.7720/0.7111
Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: 0.8285/0.6822
Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 0.8025/0.6701
Time elapsed: 12.21 min
Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 0.8651/0.6684
Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 0.8062/0.6797
Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: 0.9133/0.6643
Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 0.8128/0.6684
Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 0.8464/0.6777
Time elapsed: 12.40 min
Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 0.8408/0.6792
Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 0.8878/0.6793
Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: 0.8082/0.6872
Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 0.8299/0.6611
Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 0.8407/0.6531
Time elapsed: 12.59 min
Epoch: 068/100 | Batch 000/469 | Gen/Dis Loss: 0.8564/0.6520
Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 0.8364/0.6977
Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: 0.8073/0.6688
Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: 0.8770/0.6818
Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 0.8454/0.6738
Time elapsed: 12.79 min
Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 0.8455/0.6630
Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 0.8016/0.6751
Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: 0.7976/0.6843
Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 0.8515/0.6635
Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: 0.8309/0.6798
Time elapsed: 12.97 min
Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 0.8545/0.6897
Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: 0.8726/0.6868
Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 0.8559/0.6552
Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: 0.8333/0.6826
Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: 0.8670/0.6838
Time elapsed: 13.14 min
Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 0.8459/0.6764
Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 0.7693/0.6981
Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: 0.8273/0.6906
Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 0.8965/0.6696
Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 0.7930/0.6637
Time elapsed: 13.31 min
Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 0.8628/0.6897
Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: 0.8148/0.7031
Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 0.8407/0.6823
Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 0.8590/0.6731
Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: 0.8184/0.6741
Time elapsed: 13.51 min
Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: 0.7666/0.6783
Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 0.8279/0.6769
Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: 0.8110/0.6930
Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 0.8663/0.6692
Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 0.8638/0.6722
Time elapsed: 13.70 min
Epoch: 074/100 | Batch 000/469 | Gen/Dis Loss: 0.8186/0.6884
Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 0.8437/0.6959
Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 0.8497/0.6886
Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: 0.8581/0.6837
Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 0.8172/0.6859
Time elapsed: 13.88 min
Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 0.8053/0.6592
Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: 0.8656/0.6731
Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: 0.8292/0.6771
Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: 0.8372/0.6708
Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: 0.7856/0.7005
Time elapsed: 14.06 min
Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 0.7723/0.6855
Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: 0.8779/0.6654
Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 0.8270/0.6864
Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 0.8166/0.6800
Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: 0.8451/0.6660
Time elapsed: 14.25 min
Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: 0.8800/0.6756
Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 0.8517/0.6399
Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 0.7910/0.6974
Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: 0.8447/0.7101
Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 0.8225/0.7023
Time elapsed: 14.45 min
Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 0.8520/0.6680
Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 0.8277/0.6806
Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: 0.8407/0.6542
Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 0.8216/0.6805
Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: 0.7893/0.6845
Time elapsed: 14.64 min
Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: 0.8530/0.6718
Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: 0.8236/0.6928
Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: 0.8575/0.6589
Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: 0.8002/0.6664
Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 0.8154/0.6842
Time elapsed: 14.83 min
Epoch: 080/100 | Batch 000/469 | Gen/Dis Loss: 0.8235/0.6611
Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: 0.8061/0.6910
Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: 0.8549/0.6424
Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 0.8395/0.6650
Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 0.8219/0.6905
Time elapsed: 15.01 min
Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: 0.7948/0.7008
Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: 0.8199/0.6954
Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: 0.7884/0.6894
Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 0.8397/0.6713
Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: 0.8352/0.6776
Time elapsed: 15.20 min
Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 0.8150/0.6710
Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 0.8771/0.6653
Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: 0.8921/0.6677
Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: 0.8528/0.6654
Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: 0.8173/0.6834
Time elapsed: 15.40 min
Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: 0.7978/0.6876
Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 0.7637/0.6887
Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: 0.8266/0.6630
Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 0.8486/0.6562
Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 0.8664/0.6574
Time elapsed: 15.58 min
Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: 0.8384/0.6714
Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: 0.8248/0.6801
Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 0.8184/0.6847
Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: 0.8493/0.6565
Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: 0.8411/0.6944
Time elapsed: 15.76 min
Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: 0.8351/0.6915
Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: 0.7990/0.6961
Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: 0.7945/0.6906
Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 0.8430/0.6504
Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: 0.7981/0.6782
Time elapsed: 15.94 min
Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: 0.8331/0.6794
Epoch: 086/100 | Batch 100/469 | Gen/Dis Loss: 0.8097/0.6724
Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: 0.8284/0.6881
Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: 0.8113/0.6980
Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: 0.7919/0.6913
Time elapsed: 16.14 min
Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: 0.8008/0.7018
Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: 0.8206/0.7019
Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 0.8046/0.6866
Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: 0.7627/0.6903
Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 0.8056/0.6777
Time elapsed: 16.32 min
Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 0.8370/0.6740
Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 0.7858/0.6904
Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: 0.8060/0.6836
Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 0.8020/0.6937
Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: 0.7650/0.6909
Time elapsed: 16.50 min
Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: 0.7933/0.6834
Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: 0.8216/0.6727
Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 0.8293/0.6709
Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: 0.8014/0.6920
Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 0.7817/0.6994
Time elapsed: 16.69 min
Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: 0.8630/0.6739
Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: 0.8315/0.6730
Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: 0.7934/0.6899
Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: 0.8192/0.6777
Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 0.8106/0.7134
Time elapsed: 16.87 min
Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: 0.7778/0.6685
Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: 0.7780/0.6948
Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: 0.8179/0.6919
Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: 0.8324/0.6767
Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: 0.7972/0.6918
Time elapsed: 17.05 min
Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 0.8004/0.6804
Epoch: 092/100 | Batch 100/469 | Gen/Dis Loss: 0.8496/0.6824
Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 0.8352/0.6727
Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 0.7924/0.6836
Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 0.8173/0.6735
Time elapsed: 17.24 min
Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: 0.7848/0.7050
Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 0.7997/0.6791
Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: 0.8102/0.6733
Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: 0.7786/0.7098
Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: 0.8201/0.6757
Time elapsed: 17.42 min
Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: 0.8135/0.6697
Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: 0.8209/0.6749
Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: 0.8009/0.6885
Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: 0.8136/0.6814
Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: 0.7999/0.6860
Time elapsed: 17.61 min
Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: 0.8331/0.6654
Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: 0.7905/0.6873
Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: 0.7920/0.6876
Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: 0.7752/0.6944
Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: 0.8004/0.6750
Time elapsed: 17.81 min
Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: 0.8572/0.6645
Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 0.7788/0.7004
Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 0.8079/0.7025
Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 0.8139/0.6936
Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 0.7975/0.6916
Time elapsed: 17.99 min
Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: 0.7780/0.7176
Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: 0.8270/0.6750
Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: 0.8101/0.6973
Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: 0.8122/0.6742
Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: 0.8158/0.6763
Time elapsed: 18.17 min
Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: 0.8231/0.6770
Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: 0.8054/0.6920
Epoch: 098/100 | Batch 200/469 | Gen/Dis Loss: 0.8291/0.6844
Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: 0.8211/0.6835
Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: 0.8166/0.6829
Time elapsed: 18.35 min
Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: 0.7991/0.7082
Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: 0.8127/0.6762
Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: 0.7681/0.6932
Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: 0.7964/0.6856
Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: 0.8375/0.6877
Time elapsed: 18.54 min
Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: 0.7788/0.6775
Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: 0.7719/0.6917
Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: 0.8062/0.6970
Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: 0.8263/0.6865
Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: 0.8079/0.6748
Time elapsed: 18.73 min
Total Training Time: 18.73 min
In [9]:
### For Debugging

"""
for i in outputs:
    print(i.size())
"""

Evaluation

In [10]:
%matplotlib inline
import matplotlib.pyplot as plt
In [11]:
ax1 = plt.subplot(1, 1, 1)
ax1.plot(range(len(gener_costs)), gener_costs, label='Generator loss')
ax1.plot(range(len(discr_costs)), discr_costs, label='Discriminator loss')
ax1.set_xlabel('Iterations')
ax1.set_ylabel('Loss')
ax1.legend()

###################
# Set second x-axis (epochs), converting epoch counts
# to iteration positions on the shared x-scale
ax2 = ax1.twiny()
newlabel = list(range(NUM_EPOCHS+1))
iter_per_epoch = len(train_loader)
newpos = [e*iter_per_epoch for e in newlabel]

ax2.set_xticks(newpos[::10])
ax2.set_xticklabels(newlabel[::10])

ax2.xaxis.set_ticks_position('bottom')
ax2.xaxis.set_label_position('bottom')
ax2.spines['bottom'].set_position(('outward', 45))
ax2.set_xlabel('Epochs')
ax2.set_xlim(ax1.get_xlim())
###################

plt.show()
In [12]:
##########################
### VISUALIZATION
##########################


model.eval()
# Make new images
z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))


for i, ax in enumerate(axes):
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
In [13]:
from torchsummary import summary

# torchsummary allocates its test inputs on the default CUDA device,
# so move the model to cuda:0 first
model = model.to('cuda:0')
summary(model.generator, input_size=(100,))
summary(model.discriminator, input_size=(1, 28, 28))
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Linear-1                 [-1, 3136]         313,600
       BatchNorm1d-2                 [-1, 3136]           6,272
         LeakyReLU-3                 [-1, 3136]               0
          Reshape1-4             [-1, 64, 7, 7]               0
   ConvTranspose2d-5           [-1, 32, 13, 13]          18,432
       BatchNorm2d-6           [-1, 32, 13, 13]              64
         LeakyReLU-7           [-1, 32, 13, 13]               0
   ConvTranspose2d-8           [-1, 16, 25, 25]           4,608
       BatchNorm2d-9           [-1, 16, 25, 25]              32
        LeakyReLU-10           [-1, 16, 25, 25]               0
  ConvTranspose2d-11            [-1, 8, 27, 27]           1,152
      BatchNorm2d-12            [-1, 8, 27, 27]              16
        LeakyReLU-13            [-1, 8, 27, 27]               0
  ConvTranspose2d-14            [-1, 1, 28, 28]              32
             Tanh-15            [-1, 1, 28, 28]               0
================================================================
Total params: 344,208
Trainable params: 344,208
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.59
Params size (MB): 1.31
Estimated Total Size (MB): 1.91
----------------------------------------------------------------
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1            [-1, 8, 14, 14]              72
       BatchNorm2d-2            [-1, 8, 14, 14]              16
         LeakyReLU-3            [-1, 8, 14, 14]               0
            Conv2d-4             [-1, 32, 7, 7]           2,304
       BatchNorm2d-5             [-1, 32, 7, 7]              64
         LeakyReLU-6             [-1, 32, 7, 7]               0
           Flatten-7                 [-1, 1568]               0
            Linear-8                    [-1, 1]           1,569
================================================================
Total params: 4,025
Trainable params: 4,025
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.08
Params size (MB): 0.02
Estimated Total Size (MB): 0.10
----------------------------------------------------------------
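As a quick cross-check of the parameter counts above: the generator's first Linear layer has 100 × 3136 = 313,600 weights (bias=False), and the discriminator's final Linear layer has 1568 × 1 weights plus 1 bias = 1,569 parameters.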