Deep Learning Models -- A collection of deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.

In [1]:
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka 

CPython 3.7.3
IPython 7.6.1

torch 1.2.0
  • Runs on CPU or GPU (if available)

Deep Convolutional GAN

Imports

In [2]:
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader


if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True
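    # (Optional, not set here: torch.backends.cudnn.benchmark = False additionally
    # disables cuDNN's nondeterministic algorithm selection for fully reproducible runs.)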

Settings and Dataset

In [3]:
##########################
### SETTINGS
##########################

# Device
device = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 42
generator_learning_rate = 0.0001
discriminator_learning_rate = 0.0001
NUM_EPOCHS = 100
BATCH_SIZE = 128
LATENT_DIM = 100
IMG_SHAPE = (1, 28, 28)
IMG_SIZE = int(np.prod(IMG_SHAPE))  # 1*28*28 = 784 pixels per image



##########################
### MNIST DATASET
##########################

# Note transforms.ToTensor() scales input images
# to 0-1 range
train_dataset = datasets.MNIST(root='data', 
                               train=True, 
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data', 
                              train=False, 
                              transform=transforms.ToTensor())


train_loader = DataLoader(dataset=train_dataset, 
                          batch_size=BATCH_SIZE,
                          num_workers=4,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset, 
                         batch_size=BATCH_SIZE,
                         num_workers=4,
                         shuffle=False)

# Checking the dataset
for images, labels in train_loader:  
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Image batch dimensions: torch.Size([128, 1, 28, 28])
Image label dimensions: torch.Size([128])
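
As a quick sanity check of the transforms.ToTensor() note above, a minimal sketch (reusing the train_loader from this cell) that prints the pixel range of one batch:

# Sanity check: ToTensor() should yield pixel values in the [0, 1] range
images, _ = next(iter(train_loader))
print('Min pixel value:', images.min().item())  # expected: 0.0
print('Max pixel value:', images.max().item())  # expected: 1.0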

Model

In [4]:
##########################
### MODEL
##########################

class Flatten(nn.Module):
    def forward(self, input):
        return input.view(input.size(0), -1)
    
class Reshape1(nn.Module):
    def forward(self, input):
        return input.view(input.size(0), 64, 7, 7)


class GAN(torch.nn.Module):

    def __init__(self):
        super(GAN, self).__init__()
        
        
        self.generator = nn.Sequential(
              
            nn.Linear(LATENT_DIM, 3136, bias=False),
            nn.BatchNorm1d(num_features=3136),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            Reshape1(),
            
            nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            nn.ConvTranspose2d(in_channels=32, out_channels=16, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=16),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            nn.ConvTranspose2d(in_channels=16, out_channels=8, kernel_size=(3, 3), stride=(1, 1), padding=0, bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            
            nn.ConvTranspose2d(in_channels=8, out_channels=1, kernel_size=(2, 2), stride=(1, 1), padding=0, bias=False),
            nn.Tanh()
        )
        
        self.discriminator = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=8, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            nn.Conv2d(in_channels=8, out_channels=32, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001), 
            #nn.Dropout2d(p=0.2),
            
            Flatten(),

            nn.Linear(7*7*32, 1),
            #nn.Sigmoid()  # omitted: the training loss (binary_cross_entropy_with_logits) applies the sigmoid internally
        )

            
    def generator_forward(self, z):
        img = self.generator(z)
        return img
    
    def discriminator_forward(self, img):
        pred = self.discriminator(img)  # use self, not the global `model` instance
        return pred.view(-1)
In [5]:
torch.manual_seed(random_seed)

#del model
model = GAN()
model = model.to(device)

print(model)
GAN(
  (generator): Sequential(
    (0): Linear(in_features=100, out_features=3136, bias=False)
    (1): BatchNorm1d(3136, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): LeakyReLU(negative_slope=0.0001, inplace=True)
    (3): Reshape1()
    (4): ConvTranspose2d(64, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (5): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (6): LeakyReLU(negative_slope=0.0001, inplace=True)
    (7): ConvTranspose2d(32, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (8): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (9): LeakyReLU(negative_slope=0.0001, inplace=True)
    (10): ConvTranspose2d(16, 8, kernel_size=(3, 3), stride=(1, 1), bias=False)
    (11): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (12): LeakyReLU(negative_slope=0.0001, inplace=True)
    (13): ConvTranspose2d(8, 1, kernel_size=(2, 2), stride=(1, 1), bias=False)
    (14): Tanh()
  )
  (discriminator): Sequential(
    (0): Conv2d(1, 8, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (1): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): LeakyReLU(negative_slope=0.0001, inplace=True)
    (3): Conv2d(8, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (5): LeakyReLU(negative_slope=0.0001, inplace=True)
    (6): Flatten()
    (7): Linear(in_features=1568, out_features=1, bias=True)
  )
)
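
The printed output shapes can be verified with the standard transposed-convolution size formula (for dilation=1 and no output padding): H_out = (H_in - 1)*stride - 2*padding + kernel_size. A minimal sketch tracing the generator from the 7x7 feature map up to the 28x28 MNIST resolution:

# Trace the generator's spatial dimensions using
#   H_out = (H_in - 1)*stride - 2*padding + kernel_size
def convtranspose2d_size(h_in, kernel_size, stride, padding):
    return (h_in - 1)*stride - 2*padding + kernel_size

h = 7                                            # after Reshape1: 64 x 7 x 7
h = convtranspose2d_size(h, 3, 2, 1); print(h)   # 13
h = convtranspose2d_size(h, 3, 2, 1); print(h)   # 25
h = convtranspose2d_size(h, 3, 1, 0); print(h)   # 27
h = convtranspose2d_size(h, 2, 1, 0); print(h)   # 28 -> matches IMG_SHAPE
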
In [6]:
### FOR DEBUGGING

"""
outputs = []
def hook(module, input, output):
    outputs.append(output)

#for i, layer in enumerate(model.discriminator):
#    if isinstance(layer, torch.nn.modules.conv.Conv2d):
#        model.discriminator[i].register_forward_hook(hook)

for i, layer in enumerate(model.generator):
    if isinstance(layer, torch.nn.modules.ConvTranspose2d):
        model.generator[i].register_forward_hook(hook)
"""
In [7]:
optim_gener = torch.optim.Adam(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.Adam(model.discriminator.parameters(), lr=discriminator_learning_rate)
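
As an aside, many DCGAN implementations lower Adam's beta1 from its default of 0.9 to 0.5, following Radford et al. (2016), to reduce training oscillations. A variant of the cell above (this notebook keeps the defaults):

# Optional DCGAN-style Adam settings with beta1=0.5 (Radford et al., 2016)
optim_gener = torch.optim.Adam(model.generator.parameters(),
                               lr=generator_learning_rate, betas=(0.5, 0.999))
optim_discr = torch.optim.Adam(model.discriminator.parameters(),
                               lr=discriminator_learning_rate, betas=(0.5, 0.999))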

Training

In [8]:
start_time = time.time()    

discr_costs = []
gener_costs = []
for epoch in range(NUM_EPOCHS):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        
        # Normalize images from [0, 1] to [-1, 1] to match the generator's Tanh output range
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device) 

        targets = targets.to(device)

        valid = torch.ones(targets.size(0)).float().to(device)
        fake = torch.zeros(targets.size(0)).float().to(device)
        

        ### FORWARD AND BACK PROP
        
        
        # --------------------------
        # Train Generator
        # --------------------------
        
        # Make new images
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
        generated_features = model.generator_forward(z)
        
        # Loss for fooling the discriminator
        discr_pred = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28))
        
        gener_loss = F.binary_cross_entropy_with_logits(discr_pred, valid)
        
        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()
        
        # --------------------------
        # Train Discriminator
        # --------------------------        
        
        discr_pred_real = model.discriminator_forward(features.view(targets.size(0), 1, 28, 28))
        real_loss = F.binary_cross_entropy_with_logits(discr_pred_real, valid)
        
        discr_pred_fake = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28).detach())
        fake_loss = F.binary_cross_entropy_with_logits(discr_pred_fake, fake)
        
        discr_loss = 0.5*(real_loss + fake_loss)

        optim_discr.zero_grad()
        discr_loss.backward()
        optim_discr.step()        
        
        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())
        
        
        ### LOGGING
        if not batch_idx % 100:
            print ('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f' 
                   %(epoch+1, NUM_EPOCHS, batch_idx, 
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))
    
print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.7042/0.6743
Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 1.3058/0.3250
Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 1.4384/0.2663
Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 1.6195/0.2611
Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 1.5340/0.3022
Time elapsed: 0.19 min
Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 1.1456/0.4471
Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 1.1290/0.4476
Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 1.0849/0.4651
Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 1.0275/0.4765
Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 0.9861/0.4893
Time elapsed: 0.37 min
Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 0.9710/0.5148
Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 0.9321/0.5574
Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 0.9129/0.5724
Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 0.9706/0.5348
Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 0.9026/0.5425
Time elapsed: 0.57 min
Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 0.9785/0.5205
Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 0.9802/0.5120
Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 0.9916/0.5114
Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 0.9647/0.5268
Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 1.0392/0.5014
Time elapsed: 0.76 min
Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 1.0477/0.5005
Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 0.9455/0.5028
Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 1.0274/0.5099
Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 0.9592/0.5301
Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 0.9769/0.5288
Time elapsed: 0.94 min
Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 1.1041/0.4773
Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 1.0670/0.4941
Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 1.0629/0.4827
Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 1.0223/0.4908
Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 1.1509/0.4545
Time elapsed: 1.14 min
Epoch: 007/100 | Batch 000/469 | Gen/Dis Loss: 1.1102/0.4551
Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 1.0865/0.4770
Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 1.1400/0.4882
Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 1.1405/0.4388
Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 1.0639/0.5024
Time elapsed: 1.33 min
Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 1.1032/0.4786
Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 1.1949/0.4811
Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: 1.0076/0.4845
Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 1.1185/0.4682
Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: 1.0211/0.4773
Time elapsed: 1.51 min
Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 1.1309/0.4880
Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 1.1688/0.4936
Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 1.0846/0.4920
Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: 1.0401/0.4875
Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: 1.1135/0.4437
Time elapsed: 1.69 min
Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: 1.1250/0.4552
Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: 1.1869/0.4754
Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: 1.0266/0.5211
Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: 1.0281/0.4855
Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 1.1443/0.5059
Time elapsed: 1.86 min
Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 1.1782/0.4433
Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 1.2944/0.4828
Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: 1.2939/0.4710
Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 0.9880/0.5353
Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: 1.0860/0.5044
Time elapsed: 2.04 min
Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: 1.0354/0.4889
Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 1.0483/0.4908
Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: 1.0234/0.5043
Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: 1.2044/0.4811
Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: 1.1738/0.4902
Time elapsed: 2.21 min
Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: 1.1903/0.4955
Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: 1.1368/0.5403
Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: 1.0993/0.4859
Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 1.0989/0.5293
Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: 1.0223/0.5568
Time elapsed: 2.40 min
Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: 1.1139/0.5405
Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: 1.1770/0.4788
Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: 1.1685/0.4993
Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: 1.0546/0.5169
Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: 1.1147/0.5244
Time elapsed: 2.59 min
Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: 0.9739/0.5662
Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: 0.9286/0.5574
Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: 1.0893/0.5187
Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: 1.0183/0.5348
Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: 1.0253/0.5727
Time elapsed: 2.78 min
Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: 1.0393/0.5658
Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: 0.9653/0.5572
Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: 1.1106/0.5044
Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: 1.0155/0.5480
Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: 1.0312/0.5223
Time elapsed: 2.96 min
Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: 1.0040/0.5840
Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: 1.0765/0.5318
Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: 1.0712/0.5204
Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: 1.0746/0.5833
Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: 1.0548/0.5324
Time elapsed: 3.15 min
Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: 0.9197/0.5617
Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: 1.0251/0.5290
Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: 0.9719/0.5501
Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: 1.0612/0.5575
Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: 0.9316/0.5657
Time elapsed: 3.34 min
Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: 0.9538/0.5692
Epoch: 019/100 | Batch 100/469 | Gen/Dis Loss: 0.9766/0.5171
Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: 1.1488/0.4569
Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: 0.9186/0.5640
Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: 0.9183/0.6167
Time elapsed: 3.52 min
Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: 1.0272/0.5424
Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: 0.9360/0.6114
Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: 1.0169/0.5299
Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: 1.0314/0.5425
Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: 0.9730/0.5540
Time elapsed: 3.70 min
Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: 1.0281/0.5627
Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: 1.0060/0.6017
Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: 1.0429/0.5913
Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: 1.0129/0.5390
Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: 1.0230/0.5921
Time elapsed: 3.88 min
Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: 0.8210/0.5850
Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: 0.9765/0.6000
Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: 0.9104/0.5788
Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 0.8663/0.5950
Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: 1.0385/0.5803
Time elapsed: 4.07 min
Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: 0.9640/0.5412
Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: 0.9348/0.5825
Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: 1.0439/0.5931
Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: 1.0064/0.6169
Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: 0.9615/0.5814
Time elapsed: 4.26 min
Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: 0.9748/0.5527
Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: 0.9593/0.5664
Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: 0.8329/0.6269
Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: 0.9947/0.6028
Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: 0.9175/0.6207
Time elapsed: 4.44 min
Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: 0.9711/0.6147
Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: 0.8441/0.5788
Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: 0.8804/0.6257
Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: 0.8894/0.6305
Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: 1.0003/0.5611
Time elapsed: 4.62 min
Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: 0.9185/0.5670
Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: 0.8038/0.6254
Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: 0.9098/0.6414
Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: 0.9220/0.6424
Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 0.9396/0.6200
Time elapsed: 4.80 min
Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: 0.9383/0.5994
Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: 0.9374/0.6290
Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: 0.9360/0.5682
Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: 0.9031/0.6194
Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: 0.9073/0.6276
Time elapsed: 4.97 min
Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 0.9361/0.6210
Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 0.9590/0.6085
Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 0.9330/0.6403
Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: 0.8401/0.6287
Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 0.9091/0.5869
Time elapsed: 5.15 min
Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 1.0263/0.5919
Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 0.9032/0.6344
Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 0.9062/0.6374
Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: 0.8570/0.6521
Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 0.8176/0.6735
Time elapsed: 5.33 min
Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 0.8319/0.6868
Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: 0.8666/0.6535
Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 0.8510/0.6624
Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: 0.8705/0.6354
Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 0.8534/0.6467
Time elapsed: 5.52 min
Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 0.9562/0.6488
Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 0.8501/0.6209
Epoch: 031/100 | Batch 200/469 | Gen/Dis Loss: 0.8582/0.6281
Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 0.8317/0.6526
Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 0.8294/0.6443
Time elapsed: 5.70 min
Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 0.8732/0.6642
Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 0.9039/0.5738
Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 0.9144/0.6086
Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 0.9018/0.6199
Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 0.9000/0.6288
Time elapsed: 5.89 min
Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 0.9015/0.6006
Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 0.8965/0.6364
Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 0.8716/0.6174
Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 0.7849/0.6481
Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 0.8660/0.6665
Time elapsed: 6.08 min
Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 0.8753/0.6632
Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 0.8555/0.6768
Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 0.9026/0.6141
Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 0.8592/0.6660
Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 0.8328/0.6547
Time elapsed: 6.26 min
Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 0.8191/0.6640
Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 0.8682/0.6384
Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 0.8659/0.6529
Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 0.7945/0.6897
Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 0.8761/0.6396
Time elapsed: 6.44 min
Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 0.7889/0.6610
Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 0.8540/0.6306
Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 0.8731/0.6181
Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 0.8459/0.6869
Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 0.9194/0.7013
Time elapsed: 6.66 min
Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 0.8651/0.6302
Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 0.8718/0.6649
Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: 0.8470/0.6905
Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 0.8514/0.6743
Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 0.8256/0.6896
Time elapsed: 6.84 min
Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 0.7981/0.6623
Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 0.8720/0.6330
Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 0.8146/0.6613
Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 0.7763/0.6666
Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 0.8228/0.6759
Time elapsed: 7.03 min
Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 0.7860/0.6432
Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 0.7736/0.6537
Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 0.8010/0.6488
Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 0.7907/0.6811
Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 0.8156/0.6962
Time elapsed: 7.22 min
Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 0.8877/0.6374
Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 0.8237/0.6674
Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 0.7927/0.6621
Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 0.8991/0.6347
Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 0.7980/0.6735
Time elapsed: 7.41 min
Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 0.8294/0.6596
Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 0.8119/0.6490
Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 0.8270/0.6643
Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: 0.8329/0.6330
Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 0.8065/0.6905
Time elapsed: 7.60 min
Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 0.8127/0.6444
Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 0.8496/0.6829
Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: 0.7432/0.6781
Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: 0.8063/0.6520
Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: 0.7978/0.6950
Time elapsed: 7.79 min
Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 0.7393/0.6981
Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: 0.7949/0.6436
Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: 0.7263/0.6792
Epoch: 043/100 | Batch 300/469 | Gen/Dis Loss: 0.8445/0.6574
Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: 0.8083/0.6932
Time elapsed: 7.98 min
Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: 0.8016/0.6728
Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 0.8368/0.6798
Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 0.7767/0.6624
Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: 0.8159/0.7030
Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: 0.7738/0.6814
Time elapsed: 8.17 min
Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: 0.7911/0.6656
Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: 0.7467/0.6722
Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 0.8361/0.6641
Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 0.8219/0.6827
Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: 0.7924/0.6497
Time elapsed: 8.36 min
Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: 0.7680/0.6488
Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 0.7731/0.6371
Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: 0.7511/0.6632
Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 0.7988/0.6634
Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: 0.7706/0.6666
Time elapsed: 8.54 min
Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 0.8048/0.6702
Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: 0.8293/0.6751
Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 0.8024/0.6686
Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: 0.7915/0.6438
Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: 0.7823/0.6841
Time elapsed: 8.72 min
Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 0.8267/0.6457
Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: 0.7614/0.6931
Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 0.7751/0.6927
Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 0.7868/0.6658
Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 0.7797/0.6885
Time elapsed: 8.90 min
Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: 0.7799/0.6506
Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 0.8884/0.6786
Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: 0.7651/0.6634
Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: 0.7538/0.6758
Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 0.7760/0.6950
Time elapsed: 9.08 min
Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: 0.7811/0.6833
Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 0.7777/0.6739
Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: 0.7786/0.6908
Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: 0.7416/0.7097
Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: 0.7953/0.6441
Time elapsed: 9.26 min
Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: 0.8031/0.6554
Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: 0.7401/0.6747
Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 0.8171/0.7023
Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: 0.7133/0.7171
Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: 0.7473/0.6879
Time elapsed: 9.45 min
Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 0.7421/0.6984
Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: 0.7648/0.6513
Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 0.7021/0.6979
Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 0.7776/0.6661
Epoch: 052/100 | Batch 400/469 | Gen/Dis Loss: 0.8062/0.6475
Time elapsed: 9.64 min
Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: 0.8263/0.7115
Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 0.7361/0.6831
Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 0.7696/0.6979
Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: 0.8046/0.6792
Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: 0.7493/0.6789
Time elapsed: 9.83 min
Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 0.7835/0.6666
Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: 0.7686/0.7022
Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 0.8027/0.6755
Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 0.7231/0.6783
Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: 0.7606/0.6695
Time elapsed: 10.02 min
Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 0.7433/0.6899
Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 0.7530/0.7002
Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: 0.7508/0.6859
Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 0.7509/0.6886
Epoch: 055/100 | Batch 400/469 | Gen/Dis Loss: 0.8238/0.6777
Time elapsed: 10.19 min
Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: 0.7396/0.6836
Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: 0.8296/0.6512
Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: 0.7735/0.6803
Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: 0.7542/0.6854
Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: 0.7773/0.6647
Time elapsed: 10.36 min
Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: 0.6869/0.7104
Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 0.7571/0.6782
Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: 0.7453/0.6853
Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 0.7503/0.6680
Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: 0.7822/0.6613
Time elapsed: 10.55 min
Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: 0.7451/0.6950
Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: 0.7558/0.6745
Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 0.7261/0.6868
Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: 0.7466/0.6538
Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: 0.7235/0.6995
Time elapsed: 10.75 min
Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: 0.7853/0.6637
Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 0.7692/0.6566
Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 0.7777/0.6619
Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: 0.7503/0.6659
Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: 0.7200/0.7232
Time elapsed: 10.94 min
Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: 0.7252/0.6908
Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 0.7552/0.6910
Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: 0.7648/0.6857
Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: 0.7886/0.6721
Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: 0.7599/0.7122
Time elapsed: 11.12 min
Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: 0.7162/0.6678
Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: 0.7464/0.7128
Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: 0.7318/0.6874
Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: 0.7951/0.6544
Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: 0.7648/0.6593
Time elapsed: 11.31 min
Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: 0.7877/0.6887
Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: 0.7634/0.6638
Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: 0.7410/0.6934
Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 0.7609/0.6808
Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: 0.7256/0.7026
Time elapsed: 11.50 min
Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 0.7176/0.6757
Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: 0.7205/0.6757
Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: 0.7680/0.7091
Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 0.7366/0.6609
Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 0.7274/0.6871
Time elapsed: 11.69 min
Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 0.7007/0.7226
Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 0.7355/0.6728
Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: 0.7557/0.6936
Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: 0.7437/0.6898
Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 0.7512/0.7044
Time elapsed: 11.87 min
Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: 0.7302/0.6740
Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 0.7775/0.6942
Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 0.7653/0.7036
Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: 0.7685/0.6466
Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 0.7480/0.6988
Time elapsed: 12.06 min
Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 0.7193/0.6841
Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 0.7175/0.6873
Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: 0.7521/0.6564
Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 0.6923/0.6908
Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 0.7063/0.7025
Time elapsed: 12.25 min
Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 0.7407/0.7015
Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 0.7383/0.6728
Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: 0.7197/0.7090
Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 0.7496/0.6714
Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 0.7678/0.6930
Time elapsed: 12.43 min
Epoch: 068/100 | Batch 000/469 | Gen/Dis Loss: 0.6839/0.7058
Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 0.7106/0.7098
Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: 0.7275/0.6948
Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: 0.7247/0.6653
Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 0.7031/0.7208
Time elapsed: 12.62 min
Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 0.7105/0.6994
Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 0.7530/0.6780
Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: 0.7811/0.6714
Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 0.7101/0.6895
Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: 0.7741/0.6897
Time elapsed: 12.82 min
Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 0.7459/0.6647
Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: 0.7229/0.6980
Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 0.7142/0.6880
Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: 0.7575/0.6888
Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: 0.7443/0.6688
Time elapsed: 12.99 min
Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 0.7392/0.6906
Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 0.7221/0.7070
Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: 0.7066/0.7064
Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 0.7346/0.6812
Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 0.6982/0.7046
Time elapsed: 13.18 min
Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 0.7182/0.7272
Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: 0.7693/0.6998
Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 0.6934/0.7152
Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 0.7274/0.6977
Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: 0.7920/0.6482
Time elapsed: 13.37 min
Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: 0.7348/0.7028
Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 0.7645/0.6819
Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: 0.7096/0.7190
Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 0.7436/0.6871
Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 0.7205/0.6853
Time elapsed: 13.57 min
Epoch: 074/100 | Batch 000/469 | Gen/Dis Loss: 0.7573/0.6771
Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 0.7252/0.7207
Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 0.7440/0.6935
Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: 0.6780/0.6983
Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 0.7433/0.7149
Time elapsed: 13.75 min
Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 0.7384/0.6855
Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: 0.7449/0.6909
Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: 0.7297/0.6915
Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: 0.7086/0.7055
Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: 0.7717/0.6480
Time elapsed: 13.92 min
Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 0.6886/0.7025
Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: 0.7432/0.6995
Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 0.7580/0.6889
Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 0.7573/0.6869
Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: 0.7179/0.6700
Time elapsed: 14.11 min
Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: 0.7178/0.6770
Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 0.7476/0.6653
Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 0.6809/0.7183
Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: 0.7302/0.7057
Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 0.7666/0.6755
Time elapsed: 14.30 min
Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 0.7105/0.7080
Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 0.7547/0.6769
Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: 0.7441/0.6780
Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 0.7386/0.7000
Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: 0.7264/0.7095
Time elapsed: 14.48 min
Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: 0.6915/0.7170
Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: 0.7040/0.6950
Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: 0.7102/0.7167
Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: 0.7336/0.7043
Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 0.7293/0.7282
Time elapsed: 14.65 min
Epoch: 080/100 | Batch 000/469 | Gen/Dis Loss: 0.7565/0.6738
Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: 0.7075/0.6976
Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: 0.7109/0.6959
Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 0.7334/0.6856
Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 0.7357/0.6709
Time elapsed: 14.84 min
Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: 0.7483/0.6797
Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: 0.7282/0.7087
Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: 0.7219/0.6852
Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 0.7711/0.6864
Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: 0.7182/0.6824
Time elapsed: 15.03 min
Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 0.7293/0.6931
Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 0.7098/0.6946
Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: 0.7255/0.6813
Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: 0.7125/0.6940
Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: 0.7094/0.6864
Time elapsed: 15.21 min
Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: 0.7273/0.6865
Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 0.7655/0.6898
Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: 0.7437/0.6973
Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 0.7224/0.6992
Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 0.6938/0.6923
Time elapsed: 15.40 min
Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: 0.7416/0.6993
Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: 0.7210/0.6955
Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 0.7025/0.7031
Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: 0.7373/0.6893
Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: 0.7306/0.7161
Time elapsed: 15.57 min
Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: 0.6902/0.6930
Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: 0.6889/0.7020
Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: 0.7513/0.6646
Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 0.7368/0.6782
Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: 0.7356/0.6797
Time elapsed: 15.76 min
Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: 0.7178/0.6932
Epoch: 086/100 | Batch 100/469 | Gen/Dis Loss: 0.7472/0.6727
Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: 0.7381/0.6805
Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: 0.7106/0.6993
Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: 0.7434/0.6789
Time elapsed: 15.94 min
Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: 0.6928/0.7186
Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: 0.7573/0.6784
Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 0.7347/0.6868
Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: 0.6775/0.7108
Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 0.6929/0.6987
Time elapsed: 16.12 min
Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 0.7251/0.6847
Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 0.6991/0.7062
Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: 0.7497/0.6982
Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 0.7431/0.6628
Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: 0.7292/0.6867
Time elapsed: 16.30 min
Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: 0.7114/0.6985
Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: 0.7194/0.7012
Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 0.7152/0.7091
Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: 0.7327/0.6929
Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 0.7291/0.7052
Time elapsed: 16.50 min
Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: 0.7195/0.6888
Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: 0.7332/0.6896
Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: 0.7231/0.7014
Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: 0.7278/0.6994
Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 0.7176/0.7053
Time elapsed: 16.69 min
Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: 0.7328/0.7058
Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: 0.7082/0.7012
Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: 0.7348/0.6876
Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: 0.7375/0.6844
Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: 0.7533/0.7017
Time elapsed: 16.87 min
Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 0.7177/0.7161
Epoch: 092/100 | Batch 100/469 | Gen/Dis Loss: 0.7057/0.6844
Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 0.7255/0.6894
Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 0.7340/0.6790
Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 0.7173/0.6768
Time elapsed: 17.04 min
Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: 0.7081/0.6885
Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 0.7257/0.6966
Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: 0.7400/0.6814
Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: 0.7158/0.7051
Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: 0.7222/0.6804
Time elapsed: 17.24 min
Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: 0.7450/0.6768
Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: 0.7266/0.7039
Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: 0.7201/0.6976
Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: 0.7266/0.7124
Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: 0.7196/0.6774
Time elapsed: 17.44 min
Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: 0.7398/0.6790
Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: 0.7055/0.6950
Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: 0.7315/0.6979
Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: 0.7130/0.6876
Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: 0.7167/0.6679
Time elapsed: 17.61 min
Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: 0.6865/0.6977
Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 0.7365/0.6776
Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 0.7084/0.7021
Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 0.7397/0.6880
Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 0.7080/0.7179
Time elapsed: 17.78 min
Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: 0.7208/0.6825
Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: 0.7231/0.6816
Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: 0.7159/0.6914
Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: 0.7144/0.7064
Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: 0.7088/0.7048
Time elapsed: 17.98 min
Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: 0.7247/0.7005
Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: 0.7675/0.6761
Epoch: 098/100 | Batch 200/469 | Gen/Dis Loss: 0.7218/0.6958
Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: 0.7278/0.6866
Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: 0.7532/0.6745
Time elapsed: 18.15 min
Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: 0.7019/0.6895
Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: 0.7424/0.6801
Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: 0.7447/0.6812
Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: 0.7266/0.6907
Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: 0.7336/0.6844
Time elapsed: 18.33 min
Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: 0.7321/0.6940
Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: 0.6930/0.6972
Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: 0.6985/0.6913
Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: 0.7279/0.6904
Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: 0.7286/0.7083
Time elapsed: 18.47 min
Total Training Time: 18.47 min
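
After training, it can be handy to persist the weights. A minimal sketch (the filename 'gan_mnist.pt' is an arbitrary choice):

# Save the trained parameters; restore later via
#   model.load_state_dict(torch.load('gan_mnist.pt', map_location=device))
torch.save(model.state_dict(), 'gan_mnist.pt')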
In [9]:
### For Debugging

"""
for i in outputs:
    print(i.size())
"""

Evaluation

In [10]:
%matplotlib inline
import matplotlib.pyplot as plt
In [11]:
ax1 = plt.subplot(1, 1, 1)
ax1.plot(range(len(gener_costs)), gener_costs, label='Generator loss')
ax1.plot(range(len(discr_costs)), discr_costs, label='Discriminator loss')
ax1.set_xlabel('Iterations')
ax1.set_ylabel('Loss')
ax1.legend()

###################
# Set second x-axis (epochs) below the iteration axis
ax2 = ax1.twiny()
newlabel = list(range(NUM_EPOCHS+1))
iter_per_epoch = len(train_loader)
newpos = [e*iter_per_epoch for e in newlabel]

ax2.set_xticks(newpos[::10])
ax2.set_xticklabels(newlabel[::10])

ax2.xaxis.set_ticks_position('bottom')
ax2.xaxis.set_label_position('bottom')
ax2.spines['bottom'].set_position(('outward', 45))
ax2.set_xlabel('Epochs')
ax2.set_xlim(ax1.get_xlim())
###################

plt.show()
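
Per-iteration GAN losses are noisy; an optional short sketch (using the numpy import from above) that overlays a running mean to make the trend easier to read:

# Smooth the curves with a 100-iteration running mean
def running_mean(x, w=100):
    return np.convolve(x, np.ones(w)/w, mode='valid')

plt.plot(running_mean(gener_costs), label='Generator loss (smoothed)')
plt.plot(running_mean(discr_costs), label='Discriminator loss (smoothed)')
plt.xlabel('Iterations')
plt.ylabel('Loss')
plt.legend()
plt.show()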
In [12]:
##########################
### VISUALIZATION
##########################


model.eval()
# Make new images
z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))


for i, ax in enumerate(axes):
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
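
A common optional diagnostic is to interpolate linearly between two latent vectors and check that the generator produces smooth transitions (a rough sketch, reusing the model in eval mode):

# Optional diagnostic: linear interpolation between two random latent points
z1 = torch.zeros(1, LATENT_DIM).uniform_(-1.0, 1.0).to(device)
z2 = torch.zeros(1, LATENT_DIM).uniform_(-1.0, 1.0).to(device)
alphas = torch.linspace(0, 1, steps=7).to(device)
z_interp = torch.stack([(1 - a)*z1[0] + a*z2[0] for a in alphas])

imgs = model.generator_forward(z_interp).view(-1, 28, 28)
fig, axes = plt.subplots(nrows=1, ncols=7, figsize=(14, 2))
for img, ax in zip(imgs, axes):
    ax.imshow(img.to(torch.device('cpu')).detach(), cmap='binary')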
In [13]:
from torchsummary import summary
model = model.to('cuda:0')  # torchsummary allocates its test inputs on the default CUDA device
summary(model.generator, input_size=(100,))
summary(model.discriminator, input_size=(1, 28, 28))
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Linear-1                 [-1, 3136]         313,600
       BatchNorm1d-2                 [-1, 3136]           6,272
         LeakyReLU-3                 [-1, 3136]               0
          Reshape1-4             [-1, 64, 7, 7]               0
   ConvTranspose2d-5           [-1, 32, 13, 13]          18,432
       BatchNorm2d-6           [-1, 32, 13, 13]              64
         LeakyReLU-7           [-1, 32, 13, 13]               0
   ConvTranspose2d-8           [-1, 16, 25, 25]           4,608
       BatchNorm2d-9           [-1, 16, 25, 25]              32
        LeakyReLU-10           [-1, 16, 25, 25]               0
  ConvTranspose2d-11            [-1, 8, 27, 27]           1,152
      BatchNorm2d-12            [-1, 8, 27, 27]              16
        LeakyReLU-13            [-1, 8, 27, 27]               0
  ConvTranspose2d-14            [-1, 1, 28, 28]              32
             Tanh-15            [-1, 1, 28, 28]               0
================================================================
Total params: 344,208
Trainable params: 344,208
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.59
Params size (MB): 1.31
Estimated Total Size (MB): 1.91
----------------------------------------------------------------
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1            [-1, 8, 14, 14]              72
       BatchNorm2d-2            [-1, 8, 14, 14]              16
         LeakyReLU-3            [-1, 8, 14, 14]               0
            Conv2d-4             [-1, 32, 7, 7]           2,304
       BatchNorm2d-5             [-1, 32, 7, 7]              64
         LeakyReLU-6             [-1, 32, 7, 7]               0
           Flatten-7                 [-1, 1568]               0
            Linear-8                    [-1, 1]           1,569
================================================================
Total params: 4,025
Trainable params: 4,025
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.08
Params size (MB): 0.02
Estimated Total Size (MB): 0.10
----------------------------------------------------------------