Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.

In [1]:
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka 

CPython 3.7.3
IPython 7.6.1

torch 1.2.0
  • Runs on CPU or GPU (if available)

Model Zoo -- Generative Adversarial Networks (GAN)

Implementation of a standard GAN.
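
For reference, the standard GAN objective (Goodfellow et al., 2014) is the two-player minimax game

$$\min_G \max_D \; \mathbb{E}_{\mathbf{x} \sim p_{\text{data}}}\big[\log D(\mathbf{x})\big] + \mathbb{E}_{\mathbf{z} \sim p_{\mathbf{z}}}\big[\log\big(1 - D(G(\mathbf{z}))\big)\big].$$

As is common in practice, the training loop below uses the non-saturating generator update: the generator is trained to maximize $\log D(G(\mathbf{z}))$ (implemented as binary cross-entropy against "real" labels) rather than to minimize $\log(1 - D(G(\mathbf{z})))$.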

Imports

In [2]:
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader


if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True
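
# Note: this notebook only sets the deterministic cuDNN flag above and seeds
# PyTorch further below. A fuller seeding routine would look like the sketch
# that follows (the helper name `set_all_seeds` is illustrative, not part of
# the original code):
def set_all_seeds(seed):
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)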

Settings and Dataset

In [3]:
##########################
### SETTINGS
##########################

# Device
device = torch.device("cuda:2" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 123
generator_learning_rate = 0.001
discriminator_learning_rate = 0.001
NUM_EPOCHS = 100
BATCH_SIZE = 128
LATENT_DIM = 75
IMG_SHAPE = (1, 28, 28)
IMG_SIZE = int(np.prod(IMG_SHAPE))



##########################
### MNIST DATASET
##########################

# Note: transforms.ToTensor() scales input images to the [0, 1] range;
# the training loop below rescales them to [-1, 1] to match the
# generator's Tanh output.
train_dataset = datasets.MNIST(root='data', 
                               train=True, 
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data', 
                              train=False, 
                              transform=transforms.ToTensor())


train_loader = DataLoader(dataset=train_dataset, 
                          batch_size=BATCH_SIZE, 
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset, 
                         batch_size=BATCH_SIZE, 
                         shuffle=False)

# Checking the dataset
for images, labels in train_loader:  
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Image batch dimensions: torch.Size([128, 1, 28, 28])
Image label dimensions: torch.Size([128])
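
As an alternative to rescaling the images inside the training loop further below, the same [-1, 1] mapping could be applied once in the input pipeline. A minimal sketch using the standard torchvision transforms (not used in this notebook; `train_dataset_norm` is an illustrative name):

transform_norm = transforms.Compose([
    transforms.ToTensor(),                # scales pixels to [0, 1]
    transforms.Normalize((0.5,), (0.5,))  # shifts/scales to [-1, 1]
])

train_dataset_norm = datasets.MNIST(root='data',
                                    train=True,
                                    transform=transform_norm,
                                    download=True)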

Model

In [4]:
##########################
### MODEL
##########################


class GAN(torch.nn.Module):

    def __init__(self):
        super(GAN, self).__init__()
        
        
        self.generator = nn.Sequential(
            nn.Linear(LATENT_DIM, 128),
            nn.LeakyReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(128, IMG_SIZE),
            nn.Tanh()
        )
        
        self.discriminator = nn.Sequential(
            nn.Linear(IMG_SIZE, 128),
            nn.LeakyReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(128, 1),
            nn.Sigmoid()
        )

            
    def generator_forward(self, z):
        img = self.generator(z)
        return img
    
    def discriminator_forward(self, img):
        pred = self.discriminator(img)
        return pred.view(-1)
In [5]:
torch.manual_seed(random_seed)

model = GAN()
model = model.to(device)

optim_gener = torch.optim.Adam(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.Adam(model.discriminator.parameters(), lr=discriminator_learning_rate)
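
A quick sanity check of the generator and discriminator output shapes (an illustrative snippet, assuming the cell above has been run):

with torch.no_grad():
    z_check = torch.zeros(4, LATENT_DIM).uniform_(-1.0, 1.0).to(device)
    fake_check = model.generator_forward(z_check)
    print(fake_check.shape)                               # torch.Size([4, 784])
    print(model.discriminator_forward(fake_check).shape)  # torch.Size([4])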

Training

In [6]:
start_time = time.time()    

discr_costs = []
gener_costs = []
for epoch in range(NUM_EPOCHS):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        
        
        # rescale images from [0, 1] to [-1, 1] to match the generator's Tanh output
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device) 
        targets = targets.to(device)

        valid = torch.ones(targets.size(0)).float().to(device)
        fake = torch.zeros(targets.size(0)).float().to(device)
        

        ### FORWARD AND BACK PROP
        
        
        # --------------------------
        # Train Generator
        # --------------------------
        
        # Make new images
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
        generated_features = model.generator_forward(z)
        
        # Loss for fooling the discriminator
        discr_pred = model.discriminator_forward(generated_features)
        
        gener_loss = F.binary_cross_entropy(discr_pred, valid)
        
        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()
        
        # --------------------------
        # Train Discriminator
        # --------------------------        
        
        discr_pred_real = model.discriminator_forward(features.view(-1, IMG_SIZE))
        real_loss = F.binary_cross_entropy(discr_pred_real, valid)
        
        discr_pred_fake = model.discriminator_forward(generated_features.detach())
        fake_loss = F.binary_cross_entropy(discr_pred_fake, fake)
        
        discr_loss = 0.5*(real_loss + fake_loss)

        optim_discr.zero_grad()
        discr_loss.backward()
        optim_discr.step()        
        
        # store scalar loss values (detached from the graph) for plotting
        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())
        
        
        ### LOGGING
        if not batch_idx % 100:
            print('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f'
                   %(epoch+1, NUM_EPOCHS, batch_idx, 
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))
    
print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.6576/0.7134
Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 5.1797/0.0280
Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 1.8944/0.0933
Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 1.5018/0.1451
Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 2.0884/0.1026
Time elapsed: 0.27 min
Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 2.8803/0.0496
Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 3.4923/0.0483
Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 2.9812/0.1615
Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 2.2371/0.1658
Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 1.7027/0.2905
Time elapsed: 0.51 min
Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 1.2188/0.3533
Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 1.8254/0.2083
Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 1.9774/0.2238
Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 1.9323/0.2806
Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 1.9518/0.2712
Time elapsed: 0.77 min
Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 1.2785/0.3455
Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 1.3979/0.3208
Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 1.4295/0.3638
Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 1.2798/0.3620
Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 1.1321/0.4751
Time elapsed: 1.04 min
Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 1.1786/0.3932
Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 1.1437/0.4343
Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 1.0105/0.4453
Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 1.3987/0.4194
Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 1.3960/0.4005
Time elapsed: 1.28 min
Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 1.3119/0.4792
Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 1.6029/0.4045
Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 1.6302/0.3768
Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 0.9141/0.4838
Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 0.9891/0.4810
Time elapsed: 1.56 min
Epoch: 007/100 | Batch 000/469 | Gen/Dis Loss: 1.3198/0.4820
Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 1.1527/0.4620
Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 1.3668/0.3967
Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 1.6183/0.4676
Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 1.0077/0.4841
Time elapsed: 1.85 min
Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 1.2245/0.5437
Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 1.0142/0.4928
Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: 0.8817/0.4939
Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 1.0748/0.4967
Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: 2.1265/0.4329
Time elapsed: 2.11 min
Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 0.9277/0.4871
Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 1.1624/0.4473
Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 1.1869/0.4800
Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: 1.9998/0.4295
Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: 1.6921/0.5037
Time elapsed: 2.34 min
Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: 1.3091/0.4358
Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: 1.2604/0.5375
Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: 1.1491/0.4537
Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: 1.3843/0.5068
Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 1.3413/0.5051
Time elapsed: 2.60 min
Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 1.2368/0.5161
Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 1.3715/0.4692
Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: 1.1182/0.5274
Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 1.2770/0.4649
Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: 1.1847/0.5504
Time elapsed: 2.84 min
Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: 0.9930/0.5509
Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 1.1921/0.5310
Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: 0.9925/0.6062
Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: 1.1246/0.5170
Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: 1.0432/0.4437
Time elapsed: 3.07 min
Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: 1.1419/0.5287
Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: 1.0053/0.5152
Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: 1.1308/0.5384
Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 1.1822/0.5124
Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: 1.4501/0.5495
Time elapsed: 3.32 min
Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: 1.1417/0.5364
Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: 0.9595/0.5884
Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: 0.9887/0.5216
Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: 1.0332/0.5686
Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: 1.5268/0.4554
Time elapsed: 3.60 min
Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: 1.1181/0.4960
Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: 1.2722/0.4632
Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: 0.9523/0.6012
Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: 0.9905/0.5274
Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: 1.0448/0.5855
Time elapsed: 3.82 min
Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: 1.0641/0.5432
Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: 0.9587/0.5636
Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: 1.3602/0.5691
Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: 1.1294/0.5564
Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: 1.0727/0.5042
Time elapsed: 4.04 min
Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: 0.9285/0.6045
Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: 1.0024/0.6384
Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: 1.5662/0.4652
Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: 1.3644/0.4632
Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: 1.2681/0.5238
Time elapsed: 4.22 min
Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: 1.2578/0.5151
Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: 1.6475/0.4929
Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: 1.0610/0.5496
Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: 1.0613/0.5634
Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: 1.4675/0.4589
Time elapsed: 4.40 min
Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: 1.1211/0.5027
Epoch: 019/100 | Batch 100/469 | Gen/Dis Loss: 1.1444/0.5655
Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: 1.2471/0.5716
Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: 1.0223/0.5106
Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: 1.0361/0.5805
Time elapsed: 4.58 min
Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: 0.9195/0.5428
Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: 1.3110/0.4955
Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: 1.2449/0.4973
Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: 1.3258/0.4992
Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: 1.2196/0.5279
Time elapsed: 4.77 min
Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: 1.5621/0.5584
Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: 1.1148/0.5888
Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: 1.5108/0.4636
Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: 1.0957/0.4912
Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: 1.0342/0.5184
Time elapsed: 4.92 min
Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: 1.9312/0.4366
Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: 1.2312/0.5260
Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: 1.1939/0.5075
Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 1.1393/0.5692
Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: 1.0390/0.5261
Time elapsed: 5.05 min
Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: 1.3148/0.4902
Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: 1.2077/0.6129
Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: 1.0886/0.5545
Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: 1.0762/0.4948
Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: 1.5361/0.5476
Time elapsed: 5.17 min
Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: 1.1752/0.5881
Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: 1.3408/0.5339
Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: 1.2613/0.4555
Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: 1.0707/0.5099
Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: 1.1063/0.5695
Time elapsed: 5.32 min
Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: 1.2911/0.5084
Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: 1.1280/0.5151
Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: 1.3799/0.5784
Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: 1.1675/0.6001
Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: 0.9834/0.6158
Time elapsed: 5.48 min
Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: 1.2713/0.5475
Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: 1.3814/0.5652
Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: 1.1782/0.4850
Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: 0.9917/0.5888
Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 1.0909/0.5825
Time elapsed: 5.64 min
Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: 1.0873/0.5579
Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: 0.9639/0.5860
Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: 1.0458/0.5526
Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: 1.3373/0.5140
Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: 1.2790/0.5223
Time elapsed: 5.79 min
Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 0.9300/0.5869
Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 1.0022/0.6056
Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 1.0688/0.5447
Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: 1.0161/0.5702
Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 0.8731/0.5543
Time elapsed: 5.92 min
Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 0.8719/0.5524
Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 1.3005/0.5179
Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 1.2986/0.5312
Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: 1.1084/0.5207
Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 1.0591/0.5577
Time elapsed: 6.07 min
Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 1.0231/0.6170
Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: 0.9142/0.6046
Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 1.2140/0.5290
Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: 0.8784/0.5804
Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 1.1178/0.5165
Time elapsed: 6.20 min
Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 0.9555/0.5921
Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 0.9644/0.5432
Epoch: 031/100 | Batch 200/469 | Gen/Dis Loss: 0.9531/0.5465
Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 1.3496/0.5550
Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 1.2137/0.5672
Time elapsed: 6.32 min
Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 1.0849/0.5020
Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 0.9098/0.5481
Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 1.2349/0.5024
Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 0.9468/0.5599
Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 1.4531/0.4928
Time elapsed: 6.45 min
Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 1.3397/0.5521
Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 1.0106/0.5472
Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 0.9787/0.5606
Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 1.1434/0.5388
Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 1.0476/0.5259
Time elapsed: 6.57 min
Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 1.3847/0.5294
Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 0.8550/0.5800
Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 1.0220/0.5527
Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 0.9255/0.5751
Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 1.0400/0.5554
Time elapsed: 6.72 min
Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 0.9723/0.5789
Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 1.4414/0.4769
Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 0.9431/0.5898
Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 0.8252/0.6573
Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 0.9694/0.5427
Time elapsed: 6.84 min
Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 1.3664/0.5839
Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 1.0854/0.5739
Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 1.0429/0.5457
Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 0.8601/0.6151
Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 1.2785/0.5850
Time elapsed: 6.97 min
Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 1.0251/0.5933
Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 1.2177/0.5053
Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: 0.8804/0.5925
Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 1.2797/0.6173
Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 0.9189/0.6238
Time elapsed: 7.10 min
Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 1.3463/0.5419
Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 1.0166/0.6045
Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 0.9895/0.6320
Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 0.9749/0.5621
Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 1.0448/0.5945
Time elapsed: 7.24 min
Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 0.9662/0.5669
Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 1.1476/0.5462
Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 0.9662/0.5554
Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 1.0850/0.6031
Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 1.1491/0.6014
Time elapsed: 7.41 min
Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 0.9942/0.5999
Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 0.9034/0.5979
Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 1.1880/0.5693
Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 1.0893/0.5933
Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 1.0711/0.5501
Time elapsed: 7.59 min
Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 0.9100/0.5957
Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 0.7538/0.5947
Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 0.9743/0.5999
Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: 0.8305/0.6395
Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 1.1106/0.6419
Time elapsed: 7.73 min
Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 1.1241/0.5890
Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 0.8509/0.6164
Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: 1.2024/0.5684
Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: 0.9708/0.6378
Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: 1.1171/0.5501
Time elapsed: 7.85 min
Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 1.0931/0.5653
Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: 1.0468/0.5782
Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: 1.0359/0.6329
Epoch: 043/100 | Batch 300/469 | Gen/Dis Loss: 1.1976/0.6114
Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: 0.8817/0.6200
Time elapsed: 7.98 min
Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: 0.9911/0.6061
Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 1.0196/0.6435
Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 1.0005/0.6266
Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: 0.8342/0.6092
Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: 0.8342/0.5589
Time elapsed: 8.10 min
Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: 0.7638/0.6289
Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: 0.9049/0.5920
Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 1.0077/0.5975
Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 0.9315/0.6066
Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: 0.7719/0.6624
Time elapsed: 8.23 min
Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: 1.0064/0.5672
Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 0.8730/0.6217
Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: 1.2217/0.5859
Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 1.1649/0.5878
Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: 0.9912/0.5882
Time elapsed: 8.35 min
Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 0.8579/0.6209
Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: 1.0072/0.5908
Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 0.8694/0.6285
Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: 0.9354/0.6087
Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: 0.8800/0.6521
Time elapsed: 8.48 min
Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 0.8513/0.6051
Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: 0.8803/0.6090
Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 1.0930/0.6115
Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 0.7406/0.6692
Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 0.8551/0.6188
Time elapsed: 8.62 min
Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: 0.8792/0.5986
Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 0.8424/0.6277
Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: 0.7973/0.6320
Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: 0.9188/0.5828
Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 0.9253/0.6013
Time elapsed: 8.80 min
Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: 1.3241/0.5689
Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 1.0220/0.5922
Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: 0.9210/0.6024
Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: 0.8139/0.6578
Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: 1.0371/0.5987
Time elapsed: 8.93 min
Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: 0.9253/0.6002
Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: 0.8154/0.5774
Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 0.9697/0.6240
Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: 1.1185/0.5541
Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: 0.8016/0.6642
Time elapsed: 9.06 min
Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 0.8716/0.6364
Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: 0.9636/0.5944
Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 0.9511/0.6204
Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 0.9293/0.5901
Epoch: 052/100 | Batch 400/469 | Gen/Dis Loss: 1.1139/0.5535
Time elapsed: 9.18 min
Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: 0.8345/0.6399
Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 1.0420/0.5847
Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 0.8887/0.6183
Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: 1.1280/0.5869
Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: 0.8391/0.6031
Time elapsed: 9.30 min
Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 1.0584/0.5659
Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: 0.8722/0.5991
Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 0.8416/0.6067
Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 0.9295/0.5910
Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: 0.7705/0.6145
Time elapsed: 9.43 min
Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 0.9697/0.6207
Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 1.3702/0.5782
Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: 0.8874/0.6034
Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 0.9273/0.6095
Epoch: 055/100 | Batch 400/469 | Gen/Dis Loss: 1.0736/0.5893
Time elapsed: 9.57 min
Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: 0.9631/0.5959
Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: 0.8657/0.6398
Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: 0.8120/0.6027
Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: 1.1529/0.6493
Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: 0.9172/0.5788
Time elapsed: 9.77 min
Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: 0.9197/0.6090
Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 0.9413/0.6255
Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: 0.9020/0.5870
Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 0.9947/0.5586
Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: 0.9077/0.6454
Time elapsed: 10.03 min
Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: 0.8899/0.6106
Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: 0.8154/0.6554
Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 0.9307/0.5997
Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: 0.8293/0.5881
Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: 0.9434/0.6448
Time elapsed: 10.31 min
Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: 0.9638/0.6325
Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 0.9374/0.6304
Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 0.8452/0.6464
Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: 1.0170/0.6210
Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: 0.8808/0.5950
Time elapsed: 10.56 min
Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: 0.9076/0.5969
Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 1.1195/0.6040
Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: 0.9015/0.6149
Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: 0.8414/0.5804
Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: 0.8220/0.6557
Time elapsed: 10.83 min
Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: 0.8411/0.6360
Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: 0.8431/0.6304
Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: 0.7740/0.6395
Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: 0.8840/0.5987
Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: 0.8510/0.6232
Time elapsed: 11.07 min
Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: 1.0286/0.6151
Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: 1.0516/0.5767
Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: 0.8182/0.5654
Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 0.8658/0.6156
Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: 0.9674/0.6434
Time elapsed: 11.33 min
Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 0.6952/0.6601
Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: 0.8180/0.6041
Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: 0.8224/0.6683
Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 0.9604/0.5938
Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 0.7969/0.6561
Time elapsed: 11.54 min
Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 0.8544/0.6290
Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 0.8685/0.5925
Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: 1.4746/0.5992
Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: 0.8570/0.6417
Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 0.8588/0.6461
Time elapsed: 11.78 min
Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: 0.8579/0.6151
Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 0.9720/0.5867
Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 0.8870/0.6215
Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: 0.8184/0.6506
Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 0.9247/0.6219
Time elapsed: 12.03 min
Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 0.9073/0.6157
Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 0.8459/0.6364
Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: 1.0687/0.5647
Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 0.9213/0.6136
Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 0.7895/0.6409
Time elapsed: 12.30 min
Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 0.8258/0.6246
Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 0.9616/0.5776
Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: 0.9039/0.6012
Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 0.9857/0.5949
Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 1.1779/0.5773
Time elapsed: 12.58 min
Epoch: 068/100 | Batch 000/469 | Gen/Dis Loss: 0.9631/0.6006
Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 0.7157/0.6103
Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: 0.8400/0.6223
Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: 1.0586/0.5840
Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 0.9487/0.6224
Time elapsed: 12.84 min
Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 1.0124/0.5248
Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 0.8849/0.6481
Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: 0.9250/0.6130
Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 0.9207/0.6420
Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: 0.8661/0.6100
Time elapsed: 13.11 min
Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 1.0647/0.6247
Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: 0.8877/0.6254
Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 0.8151/0.6462
Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: 0.8807/0.6079
Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: 0.9690/0.6432
Time elapsed: 13.34 min
Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 0.8764/0.6338
Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 0.9052/0.5937
Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: 1.0023/0.5866
Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 0.7945/0.6066
Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 0.8566/0.6092
Time elapsed: 13.57 min
Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 1.0826/0.5474
Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: 0.9077/0.6232
Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 1.0860/0.6291
Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 0.9009/0.6444
Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: 0.9546/0.6265
Time elapsed: 13.82 min
Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: 0.9126/0.5977
Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 1.0169/0.6357
Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: 0.8760/0.6333
Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 0.8972/0.5929
Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 0.9535/0.6609
Time elapsed: 14.05 min
Epoch: 074/100 | Batch 000/469 | Gen/Dis Loss: 0.8905/0.6017
Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 0.9040/0.6458
Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 0.8277/0.6424
Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: 1.6138/0.5738
Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 0.9943/0.6718
Time elapsed: 14.31 min
Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 1.0839/0.6357
Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: 0.8858/0.6300
Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: 0.9034/0.6045
Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: 0.8336/0.5991
Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: 0.8414/0.6642
Time elapsed: 14.54 min
Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 0.8422/0.6506
Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: 0.8560/0.5884
Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 0.8066/0.6215
Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 0.7987/0.6537
Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: 0.8784/0.5854
Time elapsed: 14.82 min
Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: 0.9845/0.6067
Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 0.8514/0.6269
Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 1.0448/0.6637
Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: 0.9325/0.5811
Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 0.9169/0.5837
Time elapsed: 15.08 min
Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 0.9746/0.6398
Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 0.8518/0.6321
Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: 0.9485/0.5925
Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 0.8646/0.6530
Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: 0.8851/0.6056
Time elapsed: 15.33 min
Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: 0.9215/0.6184
Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: 0.8766/0.5987
Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: 0.9273/0.6339
Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: 1.0428/0.6016
Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 0.8676/0.6156
Time elapsed: 15.63 min
Epoch: 080/100 | Batch 000/469 | Gen/Dis Loss: 0.8753/0.6354
Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: 0.7689/0.6156
Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: 0.9524/0.5874
Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 1.1452/0.5870
Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 0.9418/0.5921
Time elapsed: 15.87 min
Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: 0.9341/0.5982
Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: 0.9412/0.6336
Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: 0.8976/0.6561
Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 0.8531/0.6544
Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: 0.8658/0.6275
Time elapsed: 16.14 min
Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 0.8624/0.6454
Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 0.8182/0.5911
Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: 0.8794/0.6080
Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: 0.9631/0.6111
Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: 1.0426/0.6404
Time elapsed: 16.39 min
Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: 1.0449/0.6439
Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 0.9290/0.6319
Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: 0.8768/0.6186
Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 0.8202/0.6050
Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 0.8840/0.6135
Time elapsed: 16.63 min
Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: 1.0632/0.6157
Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: 0.8863/0.5954
Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 1.0618/0.6428
Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: 1.0627/0.5874
Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: 0.9114/0.6118
Time elapsed: 16.90 min
Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: 0.8453/0.6248
Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: 1.0609/0.6182
Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: 0.8899/0.6170
Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 0.9211/0.6023
Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: 0.8161/0.6840
Time elapsed: 17.21 min
Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: 0.9190/0.5845
Epoch: 086/100 | Batch 100/469 | Gen/Dis Loss: 1.0762/0.6450
Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: 1.0070/0.6302
Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: 0.8805/0.6313
Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: 0.8568/0.6320
Time elapsed: 17.47 min
Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: 0.9597/0.6527
Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: 0.8664/0.6339
Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 1.0466/0.6181
Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: 0.8645/0.6272
Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 0.8296/0.6125
Time elapsed: 17.71 min
Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 0.8497/0.6134
Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 0.7984/0.6551
Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: 0.7777/0.6737
Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 0.8157/0.6250
Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: 0.7993/0.6446
Time elapsed: 17.96 min
Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: 0.8526/0.6219
Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: 0.9565/0.6241
Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 1.0437/0.6488
Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: 0.8082/0.6521
Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 0.9082/0.6187
Time elapsed: 18.20 min
Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: 0.8507/0.6127
Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: 0.8370/0.6160
Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: 0.8270/0.6310
Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: 0.9313/0.6230
Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 0.9462/0.6391
Time elapsed: 18.46 min
Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: 0.9294/0.6189
Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: 1.0533/0.6279
Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: 0.9623/0.6491
Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: 0.8521/0.6031
Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: 0.8233/0.6487
Time elapsed: 18.70 min
Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 0.9691/0.6357
Epoch: 092/100 | Batch 100/469 | Gen/Dis Loss: 0.8876/0.6303
Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 0.9333/0.6201
Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 0.8813/0.5981
Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 0.9026/0.6128
Time elapsed: 18.94 min
Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: 0.8874/0.6373
Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 0.8537/0.6204
Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: 0.7982/0.6342
Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: 0.9005/0.6010
Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: 1.0532/0.6091
Time elapsed: 19.20 min
Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: 0.9877/0.6426
Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: 0.8308/0.6501
Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: 0.9217/0.6269
Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: 0.9183/0.6632
Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: 0.8859/0.6128
Time elapsed: 19.46 min
Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: 0.9032/0.6331
Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: 0.8298/0.6976
Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: 1.0004/0.6347
Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: 0.9161/0.6169
Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: 0.7622/0.6884
Time elapsed: 19.71 min
Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: 0.8816/0.5997
Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 0.9499/0.5969
Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 0.8974/0.6214
Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 0.8853/0.6259
Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 0.8107/0.6027
Time elapsed: 19.95 min
Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: 0.9242/0.6189
Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: 0.8917/0.6491
Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: 0.8729/0.6375
Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: 0.8848/0.5950
Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: 0.8502/0.6296
Time elapsed: 20.21 min
Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: 0.9020/0.6453
Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: 1.1077/0.5882
Epoch: 098/100 | Batch 200/469 | Gen/Dis Loss: 0.9468/0.6364
Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: 0.8636/0.6313
Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: 0.9089/0.6911
Time elapsed: 20.45 min
Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: 0.9101/0.6386
Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: 0.8036/0.6396
Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: 0.9393/0.6060
Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: 0.8776/0.6242
Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: 0.8244/0.6278
Time elapsed: 20.68 min
Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: 0.8623/0.6496
Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: 0.9965/0.5964
Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: 0.8666/0.6306
Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: 1.1555/0.6634
Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: 0.9071/0.6545
Time elapsed: 20.94 min
Total Training Time: 20.94 min
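
After training, the weights can optionally be saved for later reuse. A minimal sketch (the filename is illustrative):

torch.save(model.state_dict(), 'gan_mnist.pt')
# later / in another session:
# model.load_state_dict(torch.load('gan_mnist.pt', map_location=device))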

Evaluation

In [7]:
%matplotlib inline
import matplotlib.pyplot as plt
In [8]:
ax1 = plt.subplot(1, 1, 1)
ax1.plot(range(len(gener_costs)), gener_costs, label='Generator loss')
ax1.plot(range(len(discr_costs)), discr_costs, label='Discriminator loss')
ax1.set_xlabel('Iterations')
ax1.set_ylabel('Loss')
ax1.legend()

###################
# Set second x-axis
ax2 = ax1.twiny()
newlabel = list(range(NUM_EPOCHS+1))
iter_per_epoch = len(train_loader)
newpos = [e*iter_per_epoch for e in newlabel]

ax2.set_xticks(newpos[::10])
ax2.set_xticklabels(newlabel[::10])

ax2.xaxis.set_ticks_position('bottom')
ax2.xaxis.set_label_position('bottom')
ax2.spines['bottom'].set_position(('outward', 45))
ax2.set_xlabel('Epochs')
ax2.set_xlim(ax1.get_xlim())
###################

plt.show()
In [9]:
##########################
### VISUALIZATION
##########################


model.eval()
# Make new images
z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
generated_features = model.generator_forward(z)
imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))


for i, ax in enumerate(axes):
    ax.imshow(imgs[i].to(torch.device('cpu')).detach(), cmap='binary')
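
Beyond plotting random samples, a common qualitative check is to interpolate between two latent vectors and verify that the generated digits morph smoothly. A minimal sketch along the lines of the cell above (not part of the original notebook):

z_a = torch.zeros(1, LATENT_DIM).uniform_(-1.0, 1.0).to(device)
z_b = torch.zeros(1, LATENT_DIM).uniform_(-1.0, 1.0).to(device)
alphas = torch.linspace(0, 1, steps=7).view(-1, 1).to(device)
z_path = z_a + alphas*(z_b - z_a)   # 7 points on the line between z_a and z_b

with torch.no_grad():
    interp_imgs = model.generator_forward(z_path).view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=7, figsize=(14, 2))
for img, ax in zip(interp_imgs, axes):
    ax.imshow(img.cpu(), cmap='binary')
    ax.axis('off')
plt.show()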