Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka CPython 3.6.8 IPython 7.2.0 torch 1.0.1.post2
import time
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
import torch.nn as nn
from torch.utils.data import DataLoader
if torch.cuda.is_available():
    # Make cuDNN deterministic so GPU runs are reproducible.
    torch.backends.cudnn.deterministic = True

##########################
### SETTINGS
##########################

# Device: cuda:2 on the training machine, CPU fallback otherwise.
device = torch.device("cuda:2" if torch.cuda.is_available() else "cpu")

# Hyperparameters
random_seed = 123
generator_learning_rate = 0.0001
discriminator_learning_rate = 0.0001
num_epochs = 100
BATCH_SIZE = 128
LATENT_DIM = 100          # size of the generator's input noise vector
IMG_SHAPE = (1, 28, 28)   # (channels, height, width) for MNIST

# Flattened image size (1*28*28 = 784); np.prod replaces the manual
# running-product loop over IMG_SHAPE.
IMG_SIZE = int(np.prod(IMG_SHAPE))
##########################
### MNIST DATASET
##########################
# Note transforms.ToTensor() scales input images
# to 0-1 range
train_dataset = datasets.MNIST(root='data',
                               train=True,
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data',
                              train=False,
                              transform=transforms.ToTensor())

train_loader = DataLoader(dataset=train_dataset,
                          batch_size=BATCH_SIZE,
                          num_workers=4,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset,
                         batch_size=BATCH_SIZE,
                         num_workers=4,
                         shuffle=False)

# Sanity check: inspect the shape of one training minibatch.
images, labels = next(iter(train_loader))
print('Image batch dimensions:', images.shape)
print('Image label dimensions:', labels.shape)
Image batch dimensions: torch.Size([128, 1, 28, 28]) Image label dimensions: torch.Size([128])
##########################
### MODEL
##########################
class Flatten(nn.Module):
    """Collapse every dimension except the batch dimension into one axis."""

    def forward(self, input):
        # Keep the batch axis; merge all remaining axes into a flat feature vector.
        batch_size = input.size(0)
        return input.view(batch_size, -1)
class Reshape1(nn.Module):
    """Reshape a flat (batch, 3136) tensor into (batch, 64, 7, 7) feature maps."""

    def forward(self, input):
        n = input.size(0)
        return input.view(n, 64, 7, 7)
class GAN(torch.nn.Module):
    """Convolutional generator/discriminator pair for 28x28 single-channel images.

    The generator maps a latent vector of size LATENT_DIM to a (1, 28, 28)
    image in [-1, 1] (Tanh output). The discriminator maps an image to a
    single unnormalized logit per example (no Sigmoid) -- pair it with
    F.binary_cross_entropy_with_logits.
    """

    def __init__(self):
        super(GAN, self).__init__()

        # Latent vector -> 3136 = 64*7*7 units -> upsampled via transposed
        # convolutions back to a 28x28 image.
        self.generator = nn.Sequential(
            nn.Linear(LATENT_DIM, 3136, bias=False),
            nn.BatchNorm1d(num_features=3136),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            Reshape1(),
            nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            nn.ConvTranspose2d(in_channels=32, out_channels=16, kernel_size=(3, 3), stride=(2, 2), padding=1, bias=False),
            nn.BatchNorm2d(num_features=16),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            nn.ConvTranspose2d(in_channels=16, out_channels=8, kernel_size=(3, 3), stride=(1, 1), padding=0, bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            nn.ConvTranspose2d(in_channels=8, out_channels=1, kernel_size=(2, 2), stride=(1, 1), padding=0, bias=False),
            nn.Tanh()  # outputs in [-1, 1], matching the normalized real images
        )

        # Strided convolutions downsample 28x28 -> 7x7 before the linear head.
        self.discriminator = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=8, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=8),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            nn.Conv2d(in_channels=8, out_channels=32, padding=1, kernel_size=(3, 3), stride=(2, 2), bias=False),
            nn.BatchNorm2d(num_features=32),
            nn.LeakyReLU(inplace=True, negative_slope=0.0001),
            #nn.Dropout2d(p=0.2),
            Flatten(),
            nn.Linear(7*7*32, 1),
            # No Sigmoid here: raw logits go into BCE-with-logits for stability.
            #nn.Sigmoid()
        )

    def generator_forward(self, z):
        """Generate a batch of images from latent vectors z of shape (batch, LATENT_DIM)."""
        img = self.generator(z)
        return img

    def discriminator_forward(self, img):
        """Return one logit per image, flattened to shape (batch,).

        Bug fix: the original referenced the global ``model`` instead of
        ``self``, which broke any instance not bound to that exact name.
        """
        pred = self.discriminator(img)
        return pred.view(-1)
# Seed the RNG before weight initialization for reproducibility.
torch.manual_seed(random_seed)

model = GAN().to(device)

print(model)
GAN( (generator): Sequential( (0): Linear(in_features=100, out_features=3136, bias=False) (1): BatchNorm1d(3136, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (2): LeakyReLU(negative_slope=0.0001, inplace) (3): Reshape1() (4): ConvTranspose2d(64, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (5): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (6): LeakyReLU(negative_slope=0.0001, inplace) (7): ConvTranspose2d(32, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (8): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (9): LeakyReLU(negative_slope=0.0001, inplace) (10): ConvTranspose2d(16, 8, kernel_size=(3, 3), stride=(1, 1), bias=False) (11): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (12): LeakyReLU(negative_slope=0.0001, inplace) (13): ConvTranspose2d(8, 1, kernel_size=(2, 2), stride=(1, 1), bias=False) (14): Tanh() ) (discriminator): Sequential( (0): Conv2d(1, 8, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (1): BatchNorm2d(8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (2): LeakyReLU(negative_slope=0.0001, inplace) (3): Conv2d(8, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (4): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (5): LeakyReLU(negative_slope=0.0001, inplace) (6): Flatten() (7): Linear(in_features=1568, out_features=1, bias=True) ) )
### ## FOR DEBUGGING
"""
outputs = []
def hook(module, input, output):
outputs.append(output)
#for i, layer in enumerate(model.discriminator):
# if isinstance(layer, torch.nn.modules.conv.Conv2d):
# model.discriminator[i].register_forward_hook(hook)
for i, layer in enumerate(model.generator):
if isinstance(layer, torch.nn.modules.ConvTranspose2d):
model.generator[i].register_forward_hook(hook)
"""
'\noutputs = []\ndef hook(module, input, output):\n outputs.append(output)\n\n#for i, layer in enumerate(model.discriminator):\n# if isinstance(layer, torch.nn.modules.conv.Conv2d):\n# model.discriminator[i].register_forward_hook(hook)\n\nfor i, layer in enumerate(model.generator):\n if isinstance(layer, torch.nn.modules.ConvTranspose2d):\n model.generator[i].register_forward_hook(hook)\n'
# Separate Adam optimizers so the generator and discriminator are
# updated independently, each over its own parameters only.
optim_gener = torch.optim.Adam(model.generator.parameters(), lr=generator_learning_rate)
optim_discr = torch.optim.Adam(model.discriminator.parameters(), lr=discriminator_learning_rate)

start_time = time.time()

# Per-minibatch loss histories (e.g. for plotting after training).
discr_costs = []
gener_costs = []

for epoch in range(num_epochs):
    model = model.train()
    for batch_idx, (features, targets) in enumerate(train_loader):

        # Normalize images to [-1, 1] range
        # (ToTensor yields [0, 1]; the generator's Tanh output is [-1, 1]).
        features = (features - 0.5)*2.
        features = features.view(-1, IMG_SIZE).to(device)
        targets = targets.to(device)

        # BCE-with-logits targets: 1 = real, 0 = fake.
        valid = torch.ones(targets.size(0)).float().to(device)
        fake = torch.zeros(targets.size(0)).float().to(device)

        ### FORWARD AND BACK PROP

        # --------------------------
        # Train Generator
        # --------------------------

        # Make new images from uniform noise in [-1, 1].
        z = torch.zeros((targets.size(0), LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
        generated_features = model.generator_forward(z)

        # Loss for fooling the discriminator: generator wants its fakes
        # labeled as real ("valid").
        discr_pred = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28))

        gener_loss = F.binary_cross_entropy_with_logits(discr_pred, valid)

        optim_gener.zero_grad()
        gener_loss.backward()
        optim_gener.step()

        # --------------------------
        # Train Discriminator
        # --------------------------

        # Real images should score as "valid".
        discr_pred_real = model.discriminator_forward(features.view(targets.size(0), 1, 28, 28))
        real_loss = F.binary_cross_entropy_with_logits(discr_pred_real, valid)

        # detach() stops gradients from this loss flowing into the generator.
        discr_pred_fake = model.discriminator_forward(generated_features.view(targets.size(0), 1, 28, 28).detach())
        fake_loss = F.binary_cross_entropy_with_logits(discr_pred_fake, fake)

        # Average of real/fake losses, the standard discriminator objective.
        discr_loss = 0.5*(real_loss + fake_loss)

        optim_discr.zero_grad()
        discr_loss.backward()
        optim_discr.step()

        discr_costs.append(discr_loss.item())
        gener_costs.append(gener_loss.item())

        ### LOGGING
        # Print progress every 100 minibatches.
        if not batch_idx % 100:
            print ('Epoch: %03d/%03d | Batch %03d/%03d | Gen/Dis Loss: %.4f/%.4f'
                   %(epoch+1, num_epochs, batch_idx,
                     len(train_loader), gener_loss, discr_loss))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))

print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/100 | Batch 000/469 | Gen/Dis Loss: 0.7258/0.7058 Epoch: 001/100 | Batch 100/469 | Gen/Dis Loss: 0.7567/0.5799 Epoch: 001/100 | Batch 200/469 | Gen/Dis Loss: 0.8557/0.5516 Epoch: 001/100 | Batch 300/469 | Gen/Dis Loss: 0.8765/0.5584 Epoch: 001/100 | Batch 400/469 | Gen/Dis Loss: 0.9139/0.5442 Time elapsed: 0.11 min Epoch: 002/100 | Batch 000/469 | Gen/Dis Loss: 0.9574/0.5057 Epoch: 002/100 | Batch 100/469 | Gen/Dis Loss: 0.8926/0.5280 Epoch: 002/100 | Batch 200/469 | Gen/Dis Loss: 0.8679/0.5756 Epoch: 002/100 | Batch 300/469 | Gen/Dis Loss: 0.8515/0.5659 Epoch: 002/100 | Batch 400/469 | Gen/Dis Loss: 0.8613/0.5464 Time elapsed: 0.21 min Epoch: 003/100 | Batch 000/469 | Gen/Dis Loss: 0.8104/0.5903 Epoch: 003/100 | Batch 100/469 | Gen/Dis Loss: 0.8360/0.5638 Epoch: 003/100 | Batch 200/469 | Gen/Dis Loss: 0.8020/0.6238 Epoch: 003/100 | Batch 300/469 | Gen/Dis Loss: 0.8124/0.5977 Epoch: 003/100 | Batch 400/469 | Gen/Dis Loss: 0.8207/0.6125 Time elapsed: 0.32 min Epoch: 004/100 | Batch 000/469 | Gen/Dis Loss: 0.8289/0.6016 Epoch: 004/100 | Batch 100/469 | Gen/Dis Loss: 0.8377/0.5820 Epoch: 004/100 | Batch 200/469 | Gen/Dis Loss: 0.7950/0.6195 Epoch: 004/100 | Batch 300/469 | Gen/Dis Loss: 0.8128/0.5942 Epoch: 004/100 | Batch 400/469 | Gen/Dis Loss: 0.8136/0.5993 Time elapsed: 0.43 min Epoch: 005/100 | Batch 000/469 | Gen/Dis Loss: 0.8302/0.5856 Epoch: 005/100 | Batch 100/469 | Gen/Dis Loss: 0.8233/0.6077 Epoch: 005/100 | Batch 200/469 | Gen/Dis Loss: 0.8893/0.5796 Epoch: 005/100 | Batch 300/469 | Gen/Dis Loss: 0.8582/0.5901 Epoch: 005/100 | Batch 400/469 | Gen/Dis Loss: 0.8452/0.5712 Time elapsed: 0.54 min Epoch: 006/100 | Batch 000/469 | Gen/Dis Loss: 0.8466/0.5833 Epoch: 006/100 | Batch 100/469 | Gen/Dis Loss: 0.8409/0.5632 Epoch: 006/100 | Batch 200/469 | Gen/Dis Loss: 0.8691/0.5583 Epoch: 006/100 | Batch 300/469 | Gen/Dis Loss: 0.8728/0.5750 Epoch: 006/100 | Batch 400/469 | Gen/Dis Loss: 0.8528/0.5493 Time elapsed: 0.65 min Epoch: 007/100 | Batch 000/469 
| Gen/Dis Loss: 0.9289/0.5365 Epoch: 007/100 | Batch 100/469 | Gen/Dis Loss: 0.9377/0.5138 Epoch: 007/100 | Batch 200/469 | Gen/Dis Loss: 0.9518/0.5212 Epoch: 007/100 | Batch 300/469 | Gen/Dis Loss: 0.9563/0.5138 Epoch: 007/100 | Batch 400/469 | Gen/Dis Loss: 0.9499/0.5089 Time elapsed: 0.76 min Epoch: 008/100 | Batch 000/469 | Gen/Dis Loss: 0.9215/0.5256 Epoch: 008/100 | Batch 100/469 | Gen/Dis Loss: 0.9297/0.5364 Epoch: 008/100 | Batch 200/469 | Gen/Dis Loss: 0.9361/0.5601 Epoch: 008/100 | Batch 300/469 | Gen/Dis Loss: 0.8759/0.5494 Epoch: 008/100 | Batch 400/469 | Gen/Dis Loss: 0.8834/0.5512 Time elapsed: 0.87 min Epoch: 009/100 | Batch 000/469 | Gen/Dis Loss: 0.9020/0.5370 Epoch: 009/100 | Batch 100/469 | Gen/Dis Loss: 0.9490/0.5464 Epoch: 009/100 | Batch 200/469 | Gen/Dis Loss: 0.9454/0.5416 Epoch: 009/100 | Batch 300/469 | Gen/Dis Loss: 0.9583/0.5306 Epoch: 009/100 | Batch 400/469 | Gen/Dis Loss: 0.8727/0.5780 Time elapsed: 0.99 min Epoch: 010/100 | Batch 000/469 | Gen/Dis Loss: 0.8831/0.5621 Epoch: 010/100 | Batch 100/469 | Gen/Dis Loss: 0.7967/0.6041 Epoch: 010/100 | Batch 200/469 | Gen/Dis Loss: 0.9212/0.5492 Epoch: 010/100 | Batch 300/469 | Gen/Dis Loss: 0.8104/0.5859 Epoch: 010/100 | Batch 400/469 | Gen/Dis Loss: 0.8940/0.5626 Time elapsed: 1.10 min Epoch: 011/100 | Batch 000/469 | Gen/Dis Loss: 0.8486/0.5441 Epoch: 011/100 | Batch 100/469 | Gen/Dis Loss: 0.8691/0.5584 Epoch: 011/100 | Batch 200/469 | Gen/Dis Loss: 0.9293/0.5677 Epoch: 011/100 | Batch 300/469 | Gen/Dis Loss: 0.9209/0.5508 Epoch: 011/100 | Batch 400/469 | Gen/Dis Loss: 0.9619/0.5608 Time elapsed: 1.21 min Epoch: 012/100 | Batch 000/469 | Gen/Dis Loss: 0.8695/0.6126 Epoch: 012/100 | Batch 100/469 | Gen/Dis Loss: 0.8749/0.5669 Epoch: 012/100 | Batch 200/469 | Gen/Dis Loss: 0.8635/0.5544 Epoch: 012/100 | Batch 300/469 | Gen/Dis Loss: 0.8324/0.5899 Epoch: 012/100 | Batch 400/469 | Gen/Dis Loss: 0.9012/0.5869 Time elapsed: 1.32 min Epoch: 013/100 | Batch 000/469 | Gen/Dis Loss: 0.9355/0.5585 
Epoch: 013/100 | Batch 100/469 | Gen/Dis Loss: 0.8476/0.5716 Epoch: 013/100 | Batch 200/469 | Gen/Dis Loss: 0.8727/0.5741 Epoch: 013/100 | Batch 300/469 | Gen/Dis Loss: 0.8235/0.6209 Epoch: 013/100 | Batch 400/469 | Gen/Dis Loss: 0.8470/0.5796 Time elapsed: 1.43 min Epoch: 014/100 | Batch 000/469 | Gen/Dis Loss: 0.8618/0.5966 Epoch: 014/100 | Batch 100/469 | Gen/Dis Loss: 0.9027/0.5763 Epoch: 014/100 | Batch 200/469 | Gen/Dis Loss: 0.8810/0.6095 Epoch: 014/100 | Batch 300/469 | Gen/Dis Loss: 0.8508/0.6071 Epoch: 014/100 | Batch 400/469 | Gen/Dis Loss: 0.8484/0.6244 Time elapsed: 1.54 min Epoch: 015/100 | Batch 000/469 | Gen/Dis Loss: 0.9484/0.5475 Epoch: 015/100 | Batch 100/469 | Gen/Dis Loss: 0.8212/0.6268 Epoch: 015/100 | Batch 200/469 | Gen/Dis Loss: 0.8781/0.5900 Epoch: 015/100 | Batch 300/469 | Gen/Dis Loss: 0.8621/0.5965 Epoch: 015/100 | Batch 400/469 | Gen/Dis Loss: 0.9071/0.5907 Time elapsed: 1.66 min Epoch: 016/100 | Batch 000/469 | Gen/Dis Loss: 0.8462/0.6186 Epoch: 016/100 | Batch 100/469 | Gen/Dis Loss: 0.9176/0.5835 Epoch: 016/100 | Batch 200/469 | Gen/Dis Loss: 0.8803/0.5979 Epoch: 016/100 | Batch 300/469 | Gen/Dis Loss: 0.8445/0.6224 Epoch: 016/100 | Batch 400/469 | Gen/Dis Loss: 0.8850/0.5681 Time elapsed: 1.77 min Epoch: 017/100 | Batch 000/469 | Gen/Dis Loss: 0.9128/0.5657 Epoch: 017/100 | Batch 100/469 | Gen/Dis Loss: 0.8875/0.6418 Epoch: 017/100 | Batch 200/469 | Gen/Dis Loss: 0.8722/0.5973 Epoch: 017/100 | Batch 300/469 | Gen/Dis Loss: 0.7493/0.6580 Epoch: 017/100 | Batch 400/469 | Gen/Dis Loss: 0.8926/0.6142 Time elapsed: 1.88 min Epoch: 018/100 | Batch 000/469 | Gen/Dis Loss: 0.7542/0.6373 Epoch: 018/100 | Batch 100/469 | Gen/Dis Loss: 0.8295/0.5998 Epoch: 018/100 | Batch 200/469 | Gen/Dis Loss: 0.8527/0.5995 Epoch: 018/100 | Batch 300/469 | Gen/Dis Loss: 0.8710/0.6157 Epoch: 018/100 | Batch 400/469 | Gen/Dis Loss: 0.8623/0.5908 Time elapsed: 1.99 min Epoch: 019/100 | Batch 000/469 | Gen/Dis Loss: 0.8325/0.6488 Epoch: 019/100 | Batch 100/469 
| Gen/Dis Loss: 0.8263/0.6105 Epoch: 019/100 | Batch 200/469 | Gen/Dis Loss: 0.8115/0.6201 Epoch: 019/100 | Batch 300/469 | Gen/Dis Loss: 0.8984/0.6106 Epoch: 019/100 | Batch 400/469 | Gen/Dis Loss: 0.7243/0.6525 Time elapsed: 2.10 min Epoch: 020/100 | Batch 000/469 | Gen/Dis Loss: 0.8168/0.6151 Epoch: 020/100 | Batch 100/469 | Gen/Dis Loss: 0.8724/0.5855 Epoch: 020/100 | Batch 200/469 | Gen/Dis Loss: 0.8348/0.6396 Epoch: 020/100 | Batch 300/469 | Gen/Dis Loss: 0.8068/0.6394 Epoch: 020/100 | Batch 400/469 | Gen/Dis Loss: 0.7860/0.6438 Time elapsed: 2.22 min Epoch: 021/100 | Batch 000/469 | Gen/Dis Loss: 0.8073/0.6350 Epoch: 021/100 | Batch 100/469 | Gen/Dis Loss: 0.8256/0.6216 Epoch: 021/100 | Batch 200/469 | Gen/Dis Loss: 0.8479/0.5804 Epoch: 021/100 | Batch 300/469 | Gen/Dis Loss: 0.7799/0.6330 Epoch: 021/100 | Batch 400/469 | Gen/Dis Loss: 0.7092/0.6785 Time elapsed: 2.33 min Epoch: 022/100 | Batch 000/469 | Gen/Dis Loss: 0.7784/0.6390 Epoch: 022/100 | Batch 100/469 | Gen/Dis Loss: 0.7742/0.6419 Epoch: 022/100 | Batch 200/469 | Gen/Dis Loss: 0.7467/0.6831 Epoch: 022/100 | Batch 300/469 | Gen/Dis Loss: 0.8776/0.6048 Epoch: 022/100 | Batch 400/469 | Gen/Dis Loss: 0.7749/0.6724 Time elapsed: 2.44 min Epoch: 023/100 | Batch 000/469 | Gen/Dis Loss: 0.8387/0.6316 Epoch: 023/100 | Batch 100/469 | Gen/Dis Loss: 0.7872/0.6180 Epoch: 023/100 | Batch 200/469 | Gen/Dis Loss: 0.7965/0.6351 Epoch: 023/100 | Batch 300/469 | Gen/Dis Loss: 0.8374/0.6462 Epoch: 023/100 | Batch 400/469 | Gen/Dis Loss: 0.7291/0.6859 Time elapsed: 2.55 min Epoch: 024/100 | Batch 000/469 | Gen/Dis Loss: 0.7957/0.6495 Epoch: 024/100 | Batch 100/469 | Gen/Dis Loss: 0.7377/0.6517 Epoch: 024/100 | Batch 200/469 | Gen/Dis Loss: 0.9071/0.6035 Epoch: 024/100 | Batch 300/469 | Gen/Dis Loss: 0.7824/0.6577 Epoch: 024/100 | Batch 400/469 | Gen/Dis Loss: 0.7642/0.6780 Time elapsed: 2.67 min Epoch: 025/100 | Batch 000/469 | Gen/Dis Loss: 0.8147/0.6543 Epoch: 025/100 | Batch 100/469 | Gen/Dis Loss: 0.7213/0.6899 
Epoch: 025/100 | Batch 200/469 | Gen/Dis Loss: 0.7999/0.6510 Epoch: 025/100 | Batch 300/469 | Gen/Dis Loss: 0.7496/0.6523 Epoch: 025/100 | Batch 400/469 | Gen/Dis Loss: 0.7749/0.6136 Time elapsed: 2.78 min Epoch: 026/100 | Batch 000/469 | Gen/Dis Loss: 0.7577/0.6663 Epoch: 026/100 | Batch 100/469 | Gen/Dis Loss: 0.7664/0.6674 Epoch: 026/100 | Batch 200/469 | Gen/Dis Loss: 0.7707/0.6712 Epoch: 026/100 | Batch 300/469 | Gen/Dis Loss: 0.8347/0.6354 Epoch: 026/100 | Batch 400/469 | Gen/Dis Loss: 0.7879/0.6534 Time elapsed: 2.89 min Epoch: 027/100 | Batch 000/469 | Gen/Dis Loss: 0.8289/0.6536 Epoch: 027/100 | Batch 100/469 | Gen/Dis Loss: 0.7284/0.6865 Epoch: 027/100 | Batch 200/469 | Gen/Dis Loss: 0.7622/0.6486 Epoch: 027/100 | Batch 300/469 | Gen/Dis Loss: 0.7362/0.6817 Epoch: 027/100 | Batch 400/469 | Gen/Dis Loss: 0.8032/0.6797 Time elapsed: 3.00 min Epoch: 028/100 | Batch 000/469 | Gen/Dis Loss: 0.7426/0.6480 Epoch: 028/100 | Batch 100/469 | Gen/Dis Loss: 0.8362/0.6409 Epoch: 028/100 | Batch 200/469 | Gen/Dis Loss: 0.8773/0.6394 Epoch: 028/100 | Batch 300/469 | Gen/Dis Loss: 0.7430/0.6850 Epoch: 028/100 | Batch 400/469 | Gen/Dis Loss: 0.7607/0.6673 Time elapsed: 3.11 min Epoch: 029/100 | Batch 000/469 | Gen/Dis Loss: 0.8062/0.6492 Epoch: 029/100 | Batch 100/469 | Gen/Dis Loss: 0.7696/0.6918 Epoch: 029/100 | Batch 200/469 | Gen/Dis Loss: 0.7795/0.6734 Epoch: 029/100 | Batch 300/469 | Gen/Dis Loss: 0.8234/0.6511 Epoch: 029/100 | Batch 400/469 | Gen/Dis Loss: 0.8141/0.6537 Time elapsed: 3.23 min Epoch: 030/100 | Batch 000/469 | Gen/Dis Loss: 0.7165/0.7027 Epoch: 030/100 | Batch 100/469 | Gen/Dis Loss: 0.7798/0.6663 Epoch: 030/100 | Batch 200/469 | Gen/Dis Loss: 0.7614/0.6543 Epoch: 030/100 | Batch 300/469 | Gen/Dis Loss: 0.7124/0.6648 Epoch: 030/100 | Batch 400/469 | Gen/Dis Loss: 0.8009/0.6549 Time elapsed: 3.34 min Epoch: 031/100 | Batch 000/469 | Gen/Dis Loss: 0.7622/0.6629 Epoch: 031/100 | Batch 100/469 | Gen/Dis Loss: 0.7727/0.6778 Epoch: 031/100 | Batch 200/469 
| Gen/Dis Loss: 0.7585/0.6426 Epoch: 031/100 | Batch 300/469 | Gen/Dis Loss: 0.7830/0.6830 Epoch: 031/100 | Batch 400/469 | Gen/Dis Loss: 0.7606/0.6691 Time elapsed: 3.45 min Epoch: 032/100 | Batch 000/469 | Gen/Dis Loss: 0.7618/0.6737 Epoch: 032/100 | Batch 100/469 | Gen/Dis Loss: 0.7255/0.6922 Epoch: 032/100 | Batch 200/469 | Gen/Dis Loss: 0.7635/0.6686 Epoch: 032/100 | Batch 300/469 | Gen/Dis Loss: 0.7324/0.6678 Epoch: 032/100 | Batch 400/469 | Gen/Dis Loss: 0.7082/0.6948 Time elapsed: 3.56 min Epoch: 033/100 | Batch 000/469 | Gen/Dis Loss: 0.7918/0.6407 Epoch: 033/100 | Batch 100/469 | Gen/Dis Loss: 0.7922/0.6447 Epoch: 033/100 | Batch 200/469 | Gen/Dis Loss: 0.7191/0.6740 Epoch: 033/100 | Batch 300/469 | Gen/Dis Loss: 0.7352/0.6663 Epoch: 033/100 | Batch 400/469 | Gen/Dis Loss: 0.7784/0.6721 Time elapsed: 3.67 min Epoch: 034/100 | Batch 000/469 | Gen/Dis Loss: 0.7811/0.6490 Epoch: 034/100 | Batch 100/469 | Gen/Dis Loss: 0.8164/0.6904 Epoch: 034/100 | Batch 200/469 | Gen/Dis Loss: 0.7652/0.6803 Epoch: 034/100 | Batch 300/469 | Gen/Dis Loss: 0.7240/0.6910 Epoch: 034/100 | Batch 400/469 | Gen/Dis Loss: 0.8004/0.6485 Time elapsed: 3.79 min Epoch: 035/100 | Batch 000/469 | Gen/Dis Loss: 0.7527/0.6897 Epoch: 035/100 | Batch 100/469 | Gen/Dis Loss: 0.7392/0.7012 Epoch: 035/100 | Batch 200/469 | Gen/Dis Loss: 0.7046/0.6876 Epoch: 035/100 | Batch 300/469 | Gen/Dis Loss: 0.7201/0.6918 Epoch: 035/100 | Batch 400/469 | Gen/Dis Loss: 0.7685/0.6511 Time elapsed: 3.90 min Epoch: 036/100 | Batch 000/469 | Gen/Dis Loss: 0.7277/0.6685 Epoch: 036/100 | Batch 100/469 | Gen/Dis Loss: 0.7403/0.6693 Epoch: 036/100 | Batch 200/469 | Gen/Dis Loss: 0.7376/0.7071 Epoch: 036/100 | Batch 300/469 | Gen/Dis Loss: 0.7292/0.6908 Epoch: 036/100 | Batch 400/469 | Gen/Dis Loss: 0.7131/0.6788 Time elapsed: 4.01 min Epoch: 037/100 | Batch 000/469 | Gen/Dis Loss: 0.7420/0.6684 Epoch: 037/100 | Batch 100/469 | Gen/Dis Loss: 0.7959/0.6421 Epoch: 037/100 | Batch 200/469 | Gen/Dis Loss: 0.7376/0.6750 
Epoch: 037/100 | Batch 300/469 | Gen/Dis Loss: 0.7477/0.6832 Epoch: 037/100 | Batch 400/469 | Gen/Dis Loss: 0.7619/0.6830 Time elapsed: 4.12 min Epoch: 038/100 | Batch 000/469 | Gen/Dis Loss: 0.7252/0.6732 Epoch: 038/100 | Batch 100/469 | Gen/Dis Loss: 0.7502/0.6642 Epoch: 038/100 | Batch 200/469 | Gen/Dis Loss: 0.7619/0.6498 Epoch: 038/100 | Batch 300/469 | Gen/Dis Loss: 0.7489/0.6596 Epoch: 038/100 | Batch 400/469 | Gen/Dis Loss: 0.7573/0.6793 Time elapsed: 4.24 min Epoch: 039/100 | Batch 000/469 | Gen/Dis Loss: 0.7450/0.6824 Epoch: 039/100 | Batch 100/469 | Gen/Dis Loss: 0.7102/0.6962 Epoch: 039/100 | Batch 200/469 | Gen/Dis Loss: 0.7620/0.6698 Epoch: 039/100 | Batch 300/469 | Gen/Dis Loss: 0.7568/0.6645 Epoch: 039/100 | Batch 400/469 | Gen/Dis Loss: 0.7472/0.6800 Time elapsed: 4.35 min Epoch: 040/100 | Batch 000/469 | Gen/Dis Loss: 0.7311/0.6580 Epoch: 040/100 | Batch 100/469 | Gen/Dis Loss: 0.7743/0.6728 Epoch: 040/100 | Batch 200/469 | Gen/Dis Loss: 0.7886/0.6702 Epoch: 040/100 | Batch 300/469 | Gen/Dis Loss: 0.7520/0.6706 Epoch: 040/100 | Batch 400/469 | Gen/Dis Loss: 0.6964/0.7169 Time elapsed: 4.46 min Epoch: 041/100 | Batch 000/469 | Gen/Dis Loss: 0.6846/0.6880 Epoch: 041/100 | Batch 100/469 | Gen/Dis Loss: 0.7384/0.7129 Epoch: 041/100 | Batch 200/469 | Gen/Dis Loss: 0.7519/0.6655 Epoch: 041/100 | Batch 300/469 | Gen/Dis Loss: 0.7274/0.6990 Epoch: 041/100 | Batch 400/469 | Gen/Dis Loss: 0.7331/0.6951 Time elapsed: 4.57 min Epoch: 042/100 | Batch 000/469 | Gen/Dis Loss: 0.7294/0.6838 Epoch: 042/100 | Batch 100/469 | Gen/Dis Loss: 0.7140/0.7100 Epoch: 042/100 | Batch 200/469 | Gen/Dis Loss: 0.7388/0.7204 Epoch: 042/100 | Batch 300/469 | Gen/Dis Loss: 0.7180/0.7000 Epoch: 042/100 | Batch 400/469 | Gen/Dis Loss: 0.7178/0.6689 Time elapsed: 4.69 min Epoch: 043/100 | Batch 000/469 | Gen/Dis Loss: 0.7270/0.6947 Epoch: 043/100 | Batch 100/469 | Gen/Dis Loss: 0.7115/0.6976 Epoch: 043/100 | Batch 200/469 | Gen/Dis Loss: 0.7201/0.6894 Epoch: 043/100 | Batch 300/469 
| Gen/Dis Loss: 0.7675/0.6436 Epoch: 043/100 | Batch 400/469 | Gen/Dis Loss: 0.7404/0.6750 Time elapsed: 4.80 min Epoch: 044/100 | Batch 000/469 | Gen/Dis Loss: 0.7293/0.6487 Epoch: 044/100 | Batch 100/469 | Gen/Dis Loss: 0.7425/0.6837 Epoch: 044/100 | Batch 200/469 | Gen/Dis Loss: 0.7256/0.6883 Epoch: 044/100 | Batch 300/469 | Gen/Dis Loss: 0.7648/0.6885 Epoch: 044/100 | Batch 400/469 | Gen/Dis Loss: 0.7142/0.7148 Time elapsed: 4.91 min Epoch: 045/100 | Batch 000/469 | Gen/Dis Loss: 0.7462/0.6956 Epoch: 045/100 | Batch 100/469 | Gen/Dis Loss: 0.7608/0.6839 Epoch: 045/100 | Batch 200/469 | Gen/Dis Loss: 0.7397/0.6834 Epoch: 045/100 | Batch 300/469 | Gen/Dis Loss: 0.7230/0.6833 Epoch: 045/100 | Batch 400/469 | Gen/Dis Loss: 0.7004/0.7001 Time elapsed: 5.02 min Epoch: 046/100 | Batch 000/469 | Gen/Dis Loss: 0.7650/0.6575 Epoch: 046/100 | Batch 100/469 | Gen/Dis Loss: 0.7477/0.6786 Epoch: 046/100 | Batch 200/469 | Gen/Dis Loss: 0.7285/0.6782 Epoch: 046/100 | Batch 300/469 | Gen/Dis Loss: 0.7539/0.6911 Epoch: 046/100 | Batch 400/469 | Gen/Dis Loss: 0.7623/0.6656 Time elapsed: 5.14 min Epoch: 047/100 | Batch 000/469 | Gen/Dis Loss: 0.7219/0.6923 Epoch: 047/100 | Batch 100/469 | Gen/Dis Loss: 0.7046/0.7039 Epoch: 047/100 | Batch 200/469 | Gen/Dis Loss: 0.7205/0.7026 Epoch: 047/100 | Batch 300/469 | Gen/Dis Loss: 0.7077/0.6762 Epoch: 047/100 | Batch 400/469 | Gen/Dis Loss: 0.7201/0.6899 Time elapsed: 5.25 min Epoch: 048/100 | Batch 000/469 | Gen/Dis Loss: 0.7344/0.6843 Epoch: 048/100 | Batch 100/469 | Gen/Dis Loss: 0.7612/0.6683 Epoch: 048/100 | Batch 200/469 | Gen/Dis Loss: 0.7472/0.6694 Epoch: 048/100 | Batch 300/469 | Gen/Dis Loss: 0.6935/0.7073 Epoch: 048/100 | Batch 400/469 | Gen/Dis Loss: 0.7342/0.6662 Time elapsed: 5.36 min Epoch: 049/100 | Batch 000/469 | Gen/Dis Loss: 0.7221/0.6847 Epoch: 049/100 | Batch 100/469 | Gen/Dis Loss: 0.7354/0.6696 Epoch: 049/100 | Batch 200/469 | Gen/Dis Loss: 0.7264/0.7108 Epoch: 049/100 | Batch 300/469 | Gen/Dis Loss: 0.6834/0.6652 
Epoch: 049/100 | Batch 400/469 | Gen/Dis Loss: 0.7081/0.7051 Time elapsed: 5.47 min Epoch: 050/100 | Batch 000/469 | Gen/Dis Loss: 0.7139/0.6842 Epoch: 050/100 | Batch 100/469 | Gen/Dis Loss: 0.7742/0.6910 Epoch: 050/100 | Batch 200/469 | Gen/Dis Loss: 0.7476/0.6880 Epoch: 050/100 | Batch 300/469 | Gen/Dis Loss: 0.7074/0.6915 Epoch: 050/100 | Batch 400/469 | Gen/Dis Loss: 0.7245/0.7140 Time elapsed: 5.59 min Epoch: 051/100 | Batch 000/469 | Gen/Dis Loss: 0.6999/0.6924 Epoch: 051/100 | Batch 100/469 | Gen/Dis Loss: 0.7025/0.6692 Epoch: 051/100 | Batch 200/469 | Gen/Dis Loss: 0.7119/0.6837 Epoch: 051/100 | Batch 300/469 | Gen/Dis Loss: 0.7111/0.6890 Epoch: 051/100 | Batch 400/469 | Gen/Dis Loss: 0.7205/0.6954 Time elapsed: 5.70 min Epoch: 052/100 | Batch 000/469 | Gen/Dis Loss: 0.7297/0.6751 Epoch: 052/100 | Batch 100/469 | Gen/Dis Loss: 0.7217/0.6760 Epoch: 052/100 | Batch 200/469 | Gen/Dis Loss: 0.7306/0.6846 Epoch: 052/100 | Batch 300/469 | Gen/Dis Loss: 0.7445/0.6858 Epoch: 052/100 | Batch 400/469 | Gen/Dis Loss: 0.7077/0.7021 Time elapsed: 5.81 min Epoch: 053/100 | Batch 000/469 | Gen/Dis Loss: 0.7324/0.6837 Epoch: 053/100 | Batch 100/469 | Gen/Dis Loss: 0.7112/0.6916 Epoch: 053/100 | Batch 200/469 | Gen/Dis Loss: 0.7360/0.6808 Epoch: 053/100 | Batch 300/469 | Gen/Dis Loss: 0.7265/0.6882 Epoch: 053/100 | Batch 400/469 | Gen/Dis Loss: 0.7481/0.6589 Time elapsed: 5.92 min Epoch: 054/100 | Batch 000/469 | Gen/Dis Loss: 0.7060/0.6839 Epoch: 054/100 | Batch 100/469 | Gen/Dis Loss: 0.7126/0.7005 Epoch: 054/100 | Batch 200/469 | Gen/Dis Loss: 0.6884/0.6986 Epoch: 054/100 | Batch 300/469 | Gen/Dis Loss: 0.7367/0.6747 Epoch: 054/100 | Batch 400/469 | Gen/Dis Loss: 0.7275/0.6843 Time elapsed: 6.04 min Epoch: 055/100 | Batch 000/469 | Gen/Dis Loss: 0.7524/0.6896 Epoch: 055/100 | Batch 100/469 | Gen/Dis Loss: 0.7015/0.7009 Epoch: 055/100 | Batch 200/469 | Gen/Dis Loss: 0.7214/0.6864 Epoch: 055/100 | Batch 300/469 | Gen/Dis Loss: 0.7363/0.6745 Epoch: 055/100 | Batch 400/469 
| Gen/Dis Loss: 0.6750/0.7052 Time elapsed: 6.15 min Epoch: 056/100 | Batch 000/469 | Gen/Dis Loss: 0.7154/0.6726 Epoch: 056/100 | Batch 100/469 | Gen/Dis Loss: 0.7504/0.6915 Epoch: 056/100 | Batch 200/469 | Gen/Dis Loss: 0.7247/0.6801 Epoch: 056/100 | Batch 300/469 | Gen/Dis Loss: 0.7104/0.7094 Epoch: 056/100 | Batch 400/469 | Gen/Dis Loss: 0.7478/0.6648 Time elapsed: 6.26 min Epoch: 057/100 | Batch 000/469 | Gen/Dis Loss: 0.6971/0.7020 Epoch: 057/100 | Batch 100/469 | Gen/Dis Loss: 0.6913/0.6889 Epoch: 057/100 | Batch 200/469 | Gen/Dis Loss: 0.7104/0.7075 Epoch: 057/100 | Batch 300/469 | Gen/Dis Loss: 0.7412/0.6714 Epoch: 057/100 | Batch 400/469 | Gen/Dis Loss: 0.7036/0.6781 Time elapsed: 6.37 min Epoch: 058/100 | Batch 000/469 | Gen/Dis Loss: 0.6931/0.6905 Epoch: 058/100 | Batch 100/469 | Gen/Dis Loss: 0.6890/0.7168 Epoch: 058/100 | Batch 200/469 | Gen/Dis Loss: 0.6954/0.6778 Epoch: 058/100 | Batch 300/469 | Gen/Dis Loss: 0.7247/0.6895 Epoch: 058/100 | Batch 400/469 | Gen/Dis Loss: 0.7134/0.6972 Time elapsed: 6.48 min Epoch: 059/100 | Batch 000/469 | Gen/Dis Loss: 0.6965/0.7357 Epoch: 059/100 | Batch 100/469 | Gen/Dis Loss: 0.6956/0.7342 Epoch: 059/100 | Batch 200/469 | Gen/Dis Loss: 0.7206/0.6850 Epoch: 059/100 | Batch 300/469 | Gen/Dis Loss: 0.7191/0.6857 Epoch: 059/100 | Batch 400/469 | Gen/Dis Loss: 0.7171/0.6985 Time elapsed: 6.59 min Epoch: 060/100 | Batch 000/469 | Gen/Dis Loss: 0.7303/0.6821 Epoch: 060/100 | Batch 100/469 | Gen/Dis Loss: 0.7202/0.6773 Epoch: 060/100 | Batch 200/469 | Gen/Dis Loss: 0.6943/0.6867 Epoch: 060/100 | Batch 300/469 | Gen/Dis Loss: 0.7005/0.7014 Epoch: 060/100 | Batch 400/469 | Gen/Dis Loss: 0.7191/0.6868 Time elapsed: 6.71 min Epoch: 061/100 | Batch 000/469 | Gen/Dis Loss: 0.7571/0.6741 Epoch: 061/100 | Batch 100/469 | Gen/Dis Loss: 0.7170/0.6848 Epoch: 061/100 | Batch 200/469 | Gen/Dis Loss: 0.6860/0.7175 Epoch: 061/100 | Batch 300/469 | Gen/Dis Loss: 0.7386/0.6894 Epoch: 061/100 | Batch 400/469 | Gen/Dis Loss: 0.6806/0.7394 
Time elapsed: 6.82 min Epoch: 062/100 | Batch 000/469 | Gen/Dis Loss: 0.7045/0.6885 Epoch: 062/100 | Batch 100/469 | Gen/Dis Loss: 0.7079/0.6893 Epoch: 062/100 | Batch 200/469 | Gen/Dis Loss: 0.7169/0.6902 Epoch: 062/100 | Batch 300/469 | Gen/Dis Loss: 0.7155/0.7040 Epoch: 062/100 | Batch 400/469 | Gen/Dis Loss: 0.7118/0.6924 Time elapsed: 6.93 min Epoch: 063/100 | Batch 000/469 | Gen/Dis Loss: 0.7376/0.6885 Epoch: 063/100 | Batch 100/469 | Gen/Dis Loss: 0.7123/0.6907 Epoch: 063/100 | Batch 200/469 | Gen/Dis Loss: 0.6990/0.7137 Epoch: 063/100 | Batch 300/469 | Gen/Dis Loss: 0.7400/0.6873 Epoch: 063/100 | Batch 400/469 | Gen/Dis Loss: 0.7067/0.6900 Time elapsed: 7.04 min Epoch: 064/100 | Batch 000/469 | Gen/Dis Loss: 0.7243/0.6886 Epoch: 064/100 | Batch 100/469 | Gen/Dis Loss: 0.7371/0.6765 Epoch: 064/100 | Batch 200/469 | Gen/Dis Loss: 0.7082/0.7039 Epoch: 064/100 | Batch 300/469 | Gen/Dis Loss: 0.7217/0.6830 Epoch: 064/100 | Batch 400/469 | Gen/Dis Loss: 0.6962/0.7046 Time elapsed: 7.15 min Epoch: 065/100 | Batch 000/469 | Gen/Dis Loss: 0.6920/0.7042 Epoch: 065/100 | Batch 100/469 | Gen/Dis Loss: 0.7391/0.6880 Epoch: 065/100 | Batch 200/469 | Gen/Dis Loss: 0.7179/0.6903 Epoch: 065/100 | Batch 300/469 | Gen/Dis Loss: 0.7057/0.7100 Epoch: 065/100 | Batch 400/469 | Gen/Dis Loss: 0.6986/0.7246 Time elapsed: 7.27 min Epoch: 066/100 | Batch 000/469 | Gen/Dis Loss: 0.6816/0.7142 Epoch: 066/100 | Batch 100/469 | Gen/Dis Loss: 0.7280/0.7187 Epoch: 066/100 | Batch 200/469 | Gen/Dis Loss: 0.6893/0.6917 Epoch: 066/100 | Batch 300/469 | Gen/Dis Loss: 0.6783/0.7212 Epoch: 066/100 | Batch 400/469 | Gen/Dis Loss: 0.7077/0.7035 Time elapsed: 7.38 min Epoch: 067/100 | Batch 000/469 | Gen/Dis Loss: 0.6741/0.7281 Epoch: 067/100 | Batch 100/469 | Gen/Dis Loss: 0.7054/0.6934 Epoch: 067/100 | Batch 200/469 | Gen/Dis Loss: 0.6950/0.7007 Epoch: 067/100 | Batch 300/469 | Gen/Dis Loss: 0.7220/0.6823 Epoch: 067/100 | Batch 400/469 | Gen/Dis Loss: 0.7056/0.6786 Time elapsed: 7.49 min Epoch: 
068/100 | Batch 000/469 | Gen/Dis Loss: 0.7149/0.6899 Epoch: 068/100 | Batch 100/469 | Gen/Dis Loss: 0.6831/0.7080 Epoch: 068/100 | Batch 200/469 | Gen/Dis Loss: 0.7129/0.6912 Epoch: 068/100 | Batch 300/469 | Gen/Dis Loss: 0.6805/0.7043 Epoch: 068/100 | Batch 400/469 | Gen/Dis Loss: 0.7198/0.6862 Time elapsed: 7.60 min Epoch: 069/100 | Batch 000/469 | Gen/Dis Loss: 0.7155/0.6956 Epoch: 069/100 | Batch 100/469 | Gen/Dis Loss: 0.6647/0.7004 Epoch: 069/100 | Batch 200/469 | Gen/Dis Loss: 0.7101/0.7041 Epoch: 069/100 | Batch 300/469 | Gen/Dis Loss: 0.6902/0.7165 Epoch: 069/100 | Batch 400/469 | Gen/Dis Loss: 0.6958/0.6964 Time elapsed: 7.71 min Epoch: 070/100 | Batch 000/469 | Gen/Dis Loss: 0.7307/0.6867 Epoch: 070/100 | Batch 100/469 | Gen/Dis Loss: 0.7059/0.7004 Epoch: 070/100 | Batch 200/469 | Gen/Dis Loss: 0.7439/0.6578 Epoch: 070/100 | Batch 300/469 | Gen/Dis Loss: 0.7060/0.6918 Epoch: 070/100 | Batch 400/469 | Gen/Dis Loss: 0.7133/0.7115 Time elapsed: 7.82 min Epoch: 071/100 | Batch 000/469 | Gen/Dis Loss: 0.6912/0.7067 Epoch: 071/100 | Batch 100/469 | Gen/Dis Loss: 0.7607/0.6689 Epoch: 071/100 | Batch 200/469 | Gen/Dis Loss: 0.7093/0.7079 Epoch: 071/100 | Batch 300/469 | Gen/Dis Loss: 0.6586/0.7082 Epoch: 071/100 | Batch 400/469 | Gen/Dis Loss: 0.7373/0.6887 Time elapsed: 7.93 min Epoch: 072/100 | Batch 000/469 | Gen/Dis Loss: 0.6848/0.7035 Epoch: 072/100 | Batch 100/469 | Gen/Dis Loss: 0.7595/0.6834 Epoch: 072/100 | Batch 200/469 | Gen/Dis Loss: 0.7200/0.6894 Epoch: 072/100 | Batch 300/469 | Gen/Dis Loss: 0.7027/0.6875 Epoch: 072/100 | Batch 400/469 | Gen/Dis Loss: 0.6932/0.6923 Time elapsed: 8.05 min Epoch: 073/100 | Batch 000/469 | Gen/Dis Loss: 0.7331/0.6827 Epoch: 073/100 | Batch 100/469 | Gen/Dis Loss: 0.7196/0.6982 Epoch: 073/100 | Batch 200/469 | Gen/Dis Loss: 0.7102/0.6970 Epoch: 073/100 | Batch 300/469 | Gen/Dis Loss: 0.7046/0.6993 Epoch: 073/100 | Batch 400/469 | Gen/Dis Loss: 0.7000/0.7084 Time elapsed: 8.16 min Epoch: 074/100 | Batch 000/469 | 
Gen/Dis Loss: 0.7173/0.6942 Epoch: 074/100 | Batch 100/469 | Gen/Dis Loss: 0.6864/0.7120 Epoch: 074/100 | Batch 200/469 | Gen/Dis Loss: 0.7195/0.7070 Epoch: 074/100 | Batch 300/469 | Gen/Dis Loss: 0.7188/0.6861 Epoch: 074/100 | Batch 400/469 | Gen/Dis Loss: 0.7031/0.7074 Time elapsed: 8.27 min Epoch: 075/100 | Batch 000/469 | Gen/Dis Loss: 0.7215/0.6806 Epoch: 075/100 | Batch 100/469 | Gen/Dis Loss: 0.7210/0.6833 Epoch: 075/100 | Batch 200/469 | Gen/Dis Loss: 0.6988/0.7080 Epoch: 075/100 | Batch 300/469 | Gen/Dis Loss: 0.6966/0.6966 Epoch: 075/100 | Batch 400/469 | Gen/Dis Loss: 0.7061/0.6977 Time elapsed: 8.38 min Epoch: 076/100 | Batch 000/469 | Gen/Dis Loss: 0.6956/0.6920 Epoch: 076/100 | Batch 100/469 | Gen/Dis Loss: 0.7220/0.6964 Epoch: 076/100 | Batch 200/469 | Gen/Dis Loss: 0.7163/0.6708 Epoch: 076/100 | Batch 300/469 | Gen/Dis Loss: 0.7021/0.6787 Epoch: 076/100 | Batch 400/469 | Gen/Dis Loss: 0.7039/0.6956 Time elapsed: 8.50 min Epoch: 077/100 | Batch 000/469 | Gen/Dis Loss: 0.7370/0.6646 Epoch: 077/100 | Batch 100/469 | Gen/Dis Loss: 0.7025/0.6821 Epoch: 077/100 | Batch 200/469 | Gen/Dis Loss: 0.6841/0.7062 Epoch: 077/100 | Batch 300/469 | Gen/Dis Loss: 0.7053/0.7047 Epoch: 077/100 | Batch 400/469 | Gen/Dis Loss: 0.7183/0.6782 Time elapsed: 8.61 min Epoch: 078/100 | Batch 000/469 | Gen/Dis Loss: 0.7611/0.6732 Epoch: 078/100 | Batch 100/469 | Gen/Dis Loss: 0.7201/0.6677 Epoch: 078/100 | Batch 200/469 | Gen/Dis Loss: 0.7439/0.6816 Epoch: 078/100 | Batch 300/469 | Gen/Dis Loss: 0.7000/0.7021 Epoch: 078/100 | Batch 400/469 | Gen/Dis Loss: 0.7169/0.6828 Time elapsed: 8.72 min Epoch: 079/100 | Batch 000/469 | Gen/Dis Loss: 0.7054/0.6965 Epoch: 079/100 | Batch 100/469 | Gen/Dis Loss: 0.6971/0.6895 Epoch: 079/100 | Batch 200/469 | Gen/Dis Loss: 0.7160/0.6846 Epoch: 079/100 | Batch 300/469 | Gen/Dis Loss: 0.6826/0.6978 Epoch: 079/100 | Batch 400/469 | Gen/Dis Loss: 0.7057/0.6950 Time elapsed: 8.83 min Epoch: 080/100 | Batch 000/469 | Gen/Dis Loss: 0.6850/0.7016 
Epoch: 080/100 | Batch 100/469 | Gen/Dis Loss: 0.6841/0.6875 Epoch: 080/100 | Batch 200/469 | Gen/Dis Loss: 0.7536/0.6677 Epoch: 080/100 | Batch 300/469 | Gen/Dis Loss: 0.6795/0.6986 Epoch: 080/100 | Batch 400/469 | Gen/Dis Loss: 0.7447/0.6731 Time elapsed: 8.94 min Epoch: 081/100 | Batch 000/469 | Gen/Dis Loss: 0.7040/0.6826 Epoch: 081/100 | Batch 100/469 | Gen/Dis Loss: 0.7045/0.6913 Epoch: 081/100 | Batch 200/469 | Gen/Dis Loss: 0.7123/0.6989 Epoch: 081/100 | Batch 300/469 | Gen/Dis Loss: 0.7318/0.6747 Epoch: 081/100 | Batch 400/469 | Gen/Dis Loss: 0.7733/0.6524 Time elapsed: 9.05 min Epoch: 082/100 | Batch 000/469 | Gen/Dis Loss: 0.7068/0.6799 Epoch: 082/100 | Batch 100/469 | Gen/Dis Loss: 0.7125/0.6855 Epoch: 082/100 | Batch 200/469 | Gen/Dis Loss: 0.6916/0.7135 Epoch: 082/100 | Batch 300/469 | Gen/Dis Loss: 0.7113/0.6951 Epoch: 082/100 | Batch 400/469 | Gen/Dis Loss: 0.7277/0.6706 Time elapsed: 9.16 min Epoch: 083/100 | Batch 000/469 | Gen/Dis Loss: 0.7007/0.6930 Epoch: 083/100 | Batch 100/469 | Gen/Dis Loss: 0.7295/0.6822 Epoch: 083/100 | Batch 200/469 | Gen/Dis Loss: 0.6925/0.7041 Epoch: 083/100 | Batch 300/469 | Gen/Dis Loss: 0.6961/0.6973 Epoch: 083/100 | Batch 400/469 | Gen/Dis Loss: 0.7217/0.6856 Time elapsed: 9.27 min Epoch: 084/100 | Batch 000/469 | Gen/Dis Loss: 0.7234/0.6867 Epoch: 084/100 | Batch 100/469 | Gen/Dis Loss: 0.7126/0.6865 Epoch: 084/100 | Batch 200/469 | Gen/Dis Loss: 0.6872/0.7103 Epoch: 084/100 | Batch 300/469 | Gen/Dis Loss: 0.7059/0.7026 Epoch: 084/100 | Batch 400/469 | Gen/Dis Loss: 0.6938/0.7042 Time elapsed: 9.39 min Epoch: 085/100 | Batch 000/469 | Gen/Dis Loss: 0.7063/0.6872 Epoch: 085/100 | Batch 100/469 | Gen/Dis Loss: 0.6863/0.6962 Epoch: 085/100 | Batch 200/469 | Gen/Dis Loss: 0.6890/0.7008 Epoch: 085/100 | Batch 300/469 | Gen/Dis Loss: 0.7016/0.6896 Epoch: 085/100 | Batch 400/469 | Gen/Dis Loss: 0.6955/0.6955 Time elapsed: 9.50 min Epoch: 086/100 | Batch 000/469 | Gen/Dis Loss: 0.7243/0.6873 Epoch: 086/100 | Batch 100/469 
| Gen/Dis Loss: 0.6724/0.7171 Epoch: 086/100 | Batch 200/469 | Gen/Dis Loss: 0.7003/0.7063 Epoch: 086/100 | Batch 300/469 | Gen/Dis Loss: 0.7089/0.6931 Epoch: 086/100 | Batch 400/469 | Gen/Dis Loss: 0.7517/0.6918 Time elapsed: 9.61 min Epoch: 087/100 | Batch 000/469 | Gen/Dis Loss: 0.7063/0.6841 Epoch: 087/100 | Batch 100/469 | Gen/Dis Loss: 0.7123/0.6929 Epoch: 087/100 | Batch 200/469 | Gen/Dis Loss: 0.6703/0.7148 Epoch: 087/100 | Batch 300/469 | Gen/Dis Loss: 0.7069/0.7158 Epoch: 087/100 | Batch 400/469 | Gen/Dis Loss: 0.7490/0.6743 Time elapsed: 9.73 min Epoch: 088/100 | Batch 000/469 | Gen/Dis Loss: 0.7264/0.6813 Epoch: 088/100 | Batch 100/469 | Gen/Dis Loss: 0.7190/0.6846 Epoch: 088/100 | Batch 200/469 | Gen/Dis Loss: 0.6913/0.7160 Epoch: 088/100 | Batch 300/469 | Gen/Dis Loss: 0.6650/0.6961 Epoch: 088/100 | Batch 400/469 | Gen/Dis Loss: 0.7104/0.6991 Time elapsed: 9.84 min Epoch: 089/100 | Batch 000/469 | Gen/Dis Loss: 0.7135/0.6879 Epoch: 089/100 | Batch 100/469 | Gen/Dis Loss: 0.7040/0.7015 Epoch: 089/100 | Batch 200/469 | Gen/Dis Loss: 0.7341/0.6976 Epoch: 089/100 | Batch 300/469 | Gen/Dis Loss: 0.6865/0.6966 Epoch: 089/100 | Batch 400/469 | Gen/Dis Loss: 0.7150/0.6960 Time elapsed: 9.95 min Epoch: 090/100 | Batch 000/469 | Gen/Dis Loss: 0.6853/0.6989 Epoch: 090/100 | Batch 100/469 | Gen/Dis Loss: 0.6962/0.6915 Epoch: 090/100 | Batch 200/469 | Gen/Dis Loss: 0.6979/0.6917 Epoch: 090/100 | Batch 300/469 | Gen/Dis Loss: 0.7116/0.6933 Epoch: 090/100 | Batch 400/469 | Gen/Dis Loss: 0.6530/0.6941 Time elapsed: 10.06 min Epoch: 091/100 | Batch 000/469 | Gen/Dis Loss: 0.7075/0.6953 Epoch: 091/100 | Batch 100/469 | Gen/Dis Loss: 0.7377/0.6750 Epoch: 091/100 | Batch 200/469 | Gen/Dis Loss: 0.7118/0.6915 Epoch: 091/100 | Batch 300/469 | Gen/Dis Loss: 0.7187/0.6842 Epoch: 091/100 | Batch 400/469 | Gen/Dis Loss: 0.7158/0.6845 Time elapsed: 10.17 min Epoch: 092/100 | Batch 000/469 | Gen/Dis Loss: 0.7106/0.6776 Epoch: 092/100 | Batch 100/469 | Gen/Dis Loss: 0.6952/0.7033 
Epoch: 092/100 | Batch 200/469 | Gen/Dis Loss: 0.6871/0.7304 Epoch: 092/100 | Batch 300/469 | Gen/Dis Loss: 0.7164/0.6925 Epoch: 092/100 | Batch 400/469 | Gen/Dis Loss: 0.7089/0.6981 Time elapsed: 10.28 min Epoch: 093/100 | Batch 000/469 | Gen/Dis Loss: 0.7067/0.6999 Epoch: 093/100 | Batch 100/469 | Gen/Dis Loss: 0.7307/0.6621 Epoch: 093/100 | Batch 200/469 | Gen/Dis Loss: 0.7297/0.6933 Epoch: 093/100 | Batch 300/469 | Gen/Dis Loss: 0.6832/0.7059 Epoch: 093/100 | Batch 400/469 | Gen/Dis Loss: 0.6968/0.6953 Time elapsed: 10.40 min Epoch: 094/100 | Batch 000/469 | Gen/Dis Loss: 0.6928/0.7056 Epoch: 094/100 | Batch 100/469 | Gen/Dis Loss: 0.7187/0.6848 Epoch: 094/100 | Batch 200/469 | Gen/Dis Loss: 0.7341/0.6744 Epoch: 094/100 | Batch 300/469 | Gen/Dis Loss: 0.7313/0.6805 Epoch: 094/100 | Batch 400/469 | Gen/Dis Loss: 0.7286/0.6766 Time elapsed: 10.51 min Epoch: 095/100 | Batch 000/469 | Gen/Dis Loss: 0.6949/0.6977 Epoch: 095/100 | Batch 100/469 | Gen/Dis Loss: 0.7209/0.6785 Epoch: 095/100 | Batch 200/469 | Gen/Dis Loss: 0.6830/0.6961 Epoch: 095/100 | Batch 300/469 | Gen/Dis Loss: 0.6924/0.7129 Epoch: 095/100 | Batch 400/469 | Gen/Dis Loss: 0.6994/0.6977 Time elapsed: 10.62 min Epoch: 096/100 | Batch 000/469 | Gen/Dis Loss: 0.7030/0.6872 Epoch: 096/100 | Batch 100/469 | Gen/Dis Loss: 0.7184/0.6817 Epoch: 096/100 | Batch 200/469 | Gen/Dis Loss: 0.6800/0.7069 Epoch: 096/100 | Batch 300/469 | Gen/Dis Loss: 0.6787/0.7176 Epoch: 096/100 | Batch 400/469 | Gen/Dis Loss: 0.7195/0.6742 Time elapsed: 10.73 min Epoch: 097/100 | Batch 000/469 | Gen/Dis Loss: 0.7010/0.7022 Epoch: 097/100 | Batch 100/469 | Gen/Dis Loss: 0.7229/0.6837 Epoch: 097/100 | Batch 200/469 | Gen/Dis Loss: 0.7249/0.6841 Epoch: 097/100 | Batch 300/469 | Gen/Dis Loss: 0.6825/0.7077 Epoch: 097/100 | Batch 400/469 | Gen/Dis Loss: 0.7154/0.6792 Time elapsed: 10.84 min Epoch: 098/100 | Batch 000/469 | Gen/Dis Loss: 0.7026/0.6956 Epoch: 098/100 | Batch 100/469 | Gen/Dis Loss: 0.7014/0.6958 Epoch: 098/100 | Batch 
200/469 | Gen/Dis Loss: 0.6935/0.6819 Epoch: 098/100 | Batch 300/469 | Gen/Dis Loss: 0.6863/0.6962 Epoch: 098/100 | Batch 400/469 | Gen/Dis Loss: 0.7081/0.7027 Time elapsed: 10.95 min Epoch: 099/100 | Batch 000/469 | Gen/Dis Loss: 0.6756/0.7142 Epoch: 099/100 | Batch 100/469 | Gen/Dis Loss: 0.7176/0.6848 Epoch: 099/100 | Batch 200/469 | Gen/Dis Loss: 0.7229/0.6893 Epoch: 099/100 | Batch 300/469 | Gen/Dis Loss: 0.7109/0.6857 Epoch: 099/100 | Batch 400/469 | Gen/Dis Loss: 0.7048/0.6964 Time elapsed: 11.07 min Epoch: 100/100 | Batch 000/469 | Gen/Dis Loss: 0.6977/0.6980 Epoch: 100/100 | Batch 100/469 | Gen/Dis Loss: 0.7075/0.6928 Epoch: 100/100 | Batch 200/469 | Gen/Dis Loss: 0.7012/0.6905 Epoch: 100/100 | Batch 300/469 | Gen/Dis Loss: 0.7103/0.6850 Epoch: 100/100 | Batch 400/469 | Gen/Dis Loss: 0.6911/0.7090 Time elapsed: 11.18 min Total Training Time: 11.18 min
### For Debugging
# NOTE(review): dead debug snippet deliberately neutralized as a bare string
# literal (no runtime effect beyond the notebook echoing its repr).
# 'outputs' presumably refers to intermediate activations collected during a
# forward pass -- confirm that name exists before re-enabling this loop.
"""
for i in outputs:
print(i.size())
"""
'\nfor i in outputs:\n print(i.size())\n'
%matplotlib inline
import matplotlib.pyplot as plt
plt.plot(range(len(gener_costs)), gener_costs, label='generator loss')
plt.plot(range(len(discr_costs)), discr_costs, label='discriminator loss')
plt.legend()
plt.show()
##########################
### VISUALIZATION
##########################

model.eval()

# Make new images: sample 5 latent vectors uniformly from [-1, 1) and run
# them through the generator.  no_grad() because this is pure inference --
# the original built a throwaway autograd graph and detach()ed afterwards.
with torch.no_grad():
    z = torch.zeros((5, LATENT_DIM)).uniform_(-1.0, 1.0).to(device)
    generated_features = model.generator_forward(z)
    imgs = generated_features.view(-1, 28, 28)

fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(20, 2.5))
# Use the axis object from the iterator directly instead of re-indexing
# axes[i]; move each image to the CPU for matplotlib.
for ax, img in zip(axes, imgs):
    ax.imshow(img.cpu(), cmap='binary')
from torchsummary import summary

# torchsummary performs a dummy forward pass, so the model and the dummy
# input must live on the same device.  The original hard-coded 'cuda:0'
# (crashing on CPU-only machines) and let summary() default its own device;
# derive one device string and use it for both instead.
summary_device = 'cuda' if torch.cuda.is_available() else 'cpu'
model = model.to(summary_device)
summary(model.generator, input_size=(100,), device=summary_device)
summary(model.discriminator, input_size=(1, 28, 28), device=summary_device)
---------------------------------------------------------------- Layer (type) Output Shape Param # ================================================================ Linear-1 [-1, 3136] 313,600 BatchNorm1d-2 [-1, 3136] 6,272 LeakyReLU-3 [-1, 3136] 0 Reshape1-4 [-1, 64, 7, 7] 0 ConvTranspose2d-5 [-1, 32, 13, 13] 18,432 BatchNorm2d-6 [-1, 32, 13, 13] 64 LeakyReLU-7 [-1, 32, 13, 13] 0 ConvTranspose2d-8 [-1, 16, 25, 25] 4,608 BatchNorm2d-9 [-1, 16, 25, 25] 32 LeakyReLU-10 [-1, 16, 25, 25] 0 ConvTranspose2d-11 [-1, 8, 27, 27] 1,152 BatchNorm2d-12 [-1, 8, 27, 27] 16 LeakyReLU-13 [-1, 8, 27, 27] 0 ConvTranspose2d-14 [-1, 1, 28, 28] 32 Tanh-15 [-1, 1, 28, 28] 0 ================================================================ Total params: 344,208 Trainable params: 344,208 Non-trainable params: 0 ---------------------------------------------------------------- Input size (MB): 0.00 Forward/backward pass size (MB): 0.59 Params size (MB): 1.31 Estimated Total Size (MB): 1.91 ---------------------------------------------------------------- ---------------------------------------------------------------- Layer (type) Output Shape Param # ================================================================ Conv2d-1 [-1, 8, 14, 14] 72 BatchNorm2d-2 [-1, 8, 14, 14] 16 LeakyReLU-3 [-1, 8, 14, 14] 0 Conv2d-4 [-1, 32, 7, 7] 2,304 BatchNorm2d-5 [-1, 32, 7, 7] 64 LeakyReLU-6 [-1, 32, 7, 7] 0 Flatten-7 [-1, 1568] 0 Linear-8 [-1, 1] 1,569 ================================================================ Total params: 4,025 Trainable params: 4,025 Non-trainable params: 0 ---------------------------------------------------------------- Input size (MB): 0.00 Forward/backward pass size (MB): 0.08 Params size (MB): 0.02 Estimated Total Size (MB): 0.10 ----------------------------------------------------------------