Vanilla GAN for MNIST (PyTorch)

Training in vannila_gan.ipynb did not go well (D converges to 0), so the following improvements are made.

  • Use LeakyReLU instead of ReLU.
  • Use batch normalization.
  • Use a smaller learning rate for Adam.
  • Sample the noise from a normal distribution (see the sketch below).
  • Change the number of neurons in the network.
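For the noise bullet, here is a minimal self-contained sketch (not a cell from this notebook) contrasting the two sampling choices; torch.rand draws from U[0, 1), torch.randn from N(0, 1). The earlier attempt presumably used uniform noise.

import torch
bs, nz = 100, 100
z_uniform = torch.rand(bs, nz)    # uniform in [0, 1)
z_normal = torch.randn(bs, nz)    # standard normal; what this notebook uses instead
print(z_uniform.mean(), z_uniform.std())  # roughly 0.5 and 0.29
print(z_normal.mean(), z_normal.std())    # roughly 0.0 and 1.0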

References:

In [3]:
%matplotlib inline
import torch
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable

if torch.cuda.is_available():
    import torch.cuda as t
else:
    import torch as t

from torchvision import datasets, models, transforms, utils
import torchvision.utils as vutils

import numpy as np
from numpy.random import normal
import matplotlib.pyplot as plt
import os

Preparing the MNIST dataset

In [4]:
bs = 100
In [5]:
dataloader = torch.utils.data.DataLoader(
    datasets.MNIST('data/mnist', train=True, download=True,
                   transform=transforms.Compose([
                       transforms.ToTensor()
                   ])),
    batch_size=bs
)
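
Optionally, a quick sanity check (a sketch, not an original cell) that one batch has the expected shape and that ToTensor already scales pixels to [0, 1], which matches the Sigmoid output of the generator defined below. Note that shuffle is left at its default (False) above; shuffle=True is a common choice for GAN training but is not used in this run.

images, labels = next(iter(dataloader))
print(images.size())                 # torch.Size([100, 1, 28, 28])
print(images.min(), images.max())    # 0.0 and 1.0 -- no extra normalization is applied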

Model

In [6]:
'''Discriminator'''
class netD(nn.Module):
    def __init__(self):
        super(netD, self).__init__()
        self.main = nn.Sequential(
            nn.Linear(784, 512),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 256),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(256, 1),
            nn.Sigmoid()
        )

    def forward(self, x):
        x = x.view(x.size(0), 784)
        x = self.main(x)
        return x

'''Generator'''
class netG(nn.Module):
    def __init__(self):
        super(netG, self).__init__()
        self.main = nn.Sequential(
            nn.Linear(100, 256),
            nn.BatchNorm1d(256),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(256, 512),
            nn.BatchNorm1d(512),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 1024),
            nn.BatchNorm1d(1024),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(1024, 1*28*28),
            nn.Sigmoid()
        )

    def forward(self, x):
        x = x.view(x.size(0), 100)  # avoid relying on the global batch size bs
        x = self.main(x)
        x = x.view(-1, 1, 28, 28)
        return x
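
A minimal shape check for the two networks (a sketch; the throwaway instances below are hypothetical and separate from the ones trained later):

_G, _D = netG(), netD()
_z = Variable(torch.FloatTensor(bs, 100, 1, 1).normal_(0, 1))
_img = _G(_z)                  # (bs, 1, 28, 28), values in (0, 1) from the final Sigmoid
_p = _D(_img)                  # (bs, 1), estimated probability that each image is real
print(_img.size(), _p.size())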
In [7]:
criterion = nn.BCELoss()
net_D = netD()
net_G = netG()

if torch.cuda.is_available():
    net_D = net_D.cuda()
    net_G = net_G.cuda()
    criterion = criterion.cuda()
In [9]:
optimizerD = optim.Adam(net_D.parameters(), lr = 0.00005)
optimizerG = optim.Adam(net_G.parameters(), lr = 0.00005)
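
The learning rate here is deliberately small (5e-5). A common alternative, not used in this run, is the DCGAN recipe of lr = 2e-4 with beta1 = 0.5; a sketch of what that would look like (the *_alt names are only for illustration):

optimizerD_alt = optim.Adam(net_D.parameters(), lr=0.0002, betas=(0.5, 0.999))
optimizerG_alt = optim.Adam(net_G.parameters(), lr=0.0002, betas=(0.5, 0.999))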

Train

In [10]:
input = t.FloatTensor(bs, 1, 28, 28)
noise = t.FloatTensor(normal(0, 1, (bs, 100, 1, 1)))
fixed_noise = t.FloatTensor(bs, 100, 1, 1).normal_(0, 1)
label = t.FloatTensor(bs)

real_label = 1
fake_label = 0

input = Variable(input)
label = Variable(label)
noise = Variable(noise)
fixed_noise = Variable(fixed_noise)
In [11]:
niter = 4000
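
The loop below saves image grids under a results/ directory every 10 epochs; creating the directory up front avoids a failure on the first save (a small addition, not in the original cell):

os.makedirs('results', exist_ok=True)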
In [12]:
for epoch in range(niter):
    for i, data in enumerate(dataloader, 0):
        ############################
        # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
        ###########################
        # train with real (data)
        net_D.zero_grad()
        real, _ = data
        input.data.resize_(real.size()).copy_(real)
        label.data.resize_(bs).fill_(real_label)

        output = net_D(input)
        errD_real = criterion(output, label)
        errD_real.backward()
        D_x = output.data.mean()

        #train with fake (generated)
        noise.data.resize_(bs, 100, 1, 1)
        noise.data.normal_(0, 1)
        fake = net_G(noise)
        label.data.fill_(fake_label)
        output = net_D(fake.detach())
        errD_fake = criterion(output, label)
        errD_fake.backward()
        D_G_z1 = output.data.mean()

        errD = errD_real + errD_fake
        optimizerD.step()

        ############################
        # (2) Update G network: maximize log(D(G(z)))
        ###########################
        net_G.zero_grad()
        label.data.fill_(real_label)
        output = net_D(fake)
        errG = criterion(output, label)
        errG.backward()
        D_G_z2 = output.data.mean()
        optimizerG.step()
        if i % 100 == 0:
            print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f'
                 % (epoch, niter, i, len(dataloader),
                   errD.data[0], errG.data[0],  D_x, D_G_z1, D_G_z2))
    if epoch % 10 == 0:
        fake = net_G(fixed_noise)
        vutils.save_image(fake.data, '%s/fake_samples_epoch_%03d.png'
                              % ('results', epoch), normalize=True)
[0/4000][0/600] Loss_D: 1.3885 Loss_G: 0.7193 D(x): 0.5018 D(G(z)): 0.5029 / 0.4871
[0/4000][100/600] Loss_D: 0.2039 Loss_G: 4.1782 D(x): 0.8943 D(G(z)): 0.0672 / 0.0608
[0/4000][200/600] Loss_D: 0.3222 Loss_G: 3.9727 D(x): 0.8210 D(G(z)): 0.0836 / 0.0762
[0/4000][300/600] Loss_D: 0.2737 Loss_G: 2.9381 D(x): 0.8235 D(G(z)): 0.0632 / 0.0626
[0/4000][400/600] Loss_D: 0.1356 Loss_G: 3.3121 D(x): 0.9232 D(G(z)): 0.0521 / 0.0406
[0/4000][500/600] Loss_D: 0.1118 Loss_G: 3.3953 D(x): 0.9374 D(G(z)): 0.0428 / 0.0367
[1/4000][0/600] Loss_D: 0.0891 Loss_G: 3.9906 D(x): 0.9615 D(G(z)): 0.0463 / 0.0256
[1/4000][100/600] Loss_D: 0.1392 Loss_G: 3.1369 D(x): 0.9465 D(G(z)): 0.0652 / 0.0549
[1/4000][200/600] Loss_D: 0.1490 Loss_G: 3.5705 D(x): 0.9557 D(G(z)): 0.0707 / 0.0501
[1/4000][300/600] Loss_D: 0.1450 Loss_G: 3.3625 D(x): 0.9671 D(G(z)): 0.0888 / 0.0581
[1/4000][400/600] Loss_D: 0.1139 Loss_G: 3.3370 D(x): 0.9723 D(G(z)): 0.0782 / 0.0482
[1/4000][500/600] Loss_D: 0.2371 Loss_G: 2.9955 D(x): 0.9235 D(G(z)): 0.0951 / 0.0676
[2/4000][0/600] Loss_D: 0.2712 Loss_G: 3.1270 D(x): 0.9171 D(G(z)): 0.1225 / 0.0849
[2/4000][100/600] Loss_D: 0.2722 Loss_G: 3.7160 D(x): 0.9201 D(G(z)): 0.1158 / 0.0948
[2/4000][200/600] Loss_D: 0.1986 Loss_G: 4.1082 D(x): 0.9617 D(G(z)): 0.1039 / 0.0720
[2/4000][300/600] Loss_D: 0.1760 Loss_G: 4.1830 D(x): 0.9602 D(G(z)): 0.0742 / 0.0570
[2/4000][400/600] Loss_D: 0.3505 Loss_G: 3.7002 D(x): 0.9014 D(G(z)): 0.0894 / 0.0646
[2/4000][500/600] Loss_D: 0.3939 Loss_G: 2.2979 D(x): 0.8788 D(G(z)): 0.1603 / 0.1508
[3/4000][0/600] Loss_D: 0.3538 Loss_G: 2.5244 D(x): 0.8994 D(G(z)): 0.1405 / 0.1289
[3/4000][100/600] Loss_D: 0.1664 Loss_G: 2.9389 D(x): 0.9714 D(G(z)): 0.1004 / 0.0886
[3/4000][200/600] Loss_D: 0.2222 Loss_G: 2.9919 D(x): 0.9638 D(G(z)): 0.1059 / 0.0916
[3/4000][300/600] Loss_D: 0.2514 Loss_G: 2.7258 D(x): 0.9506 D(G(z)): 0.1109 / 0.0952
[3/4000][400/600] Loss_D: 0.2591 Loss_G: 2.7858 D(x): 0.9091 D(G(z)): 0.0914 / 0.0815
[3/4000][500/600] Loss_D: 0.2596 Loss_G: 2.9744 D(x): 0.9055 D(G(z)): 0.0939 / 0.0962
[4/4000][0/600] Loss_D: 0.2063 Loss_G: 3.2386 D(x): 0.9363 D(G(z)): 0.0815 / 0.0656
[4/4000][100/600] Loss_D: 0.1462 Loss_G: 3.1894 D(x): 0.9608 D(G(z)): 0.0836 / 0.0641
[4/4000][200/600] Loss_D: 0.1372 Loss_G: 3.2696 D(x): 0.9537 D(G(z)): 0.0696 / 0.0625
[4/4000][300/600] Loss_D: 0.2280 Loss_G: 3.5655 D(x): 0.9383 D(G(z)): 0.0727 / 0.0615
[4/4000][400/600] Loss_D: 0.2120 Loss_G: 3.4613 D(x): 0.9136 D(G(z)): 0.0694 / 0.0596
[4/4000][500/600] Loss_D: 0.2006 Loss_G: 3.5587 D(x): 0.9294 D(G(z)): 0.0685 / 0.0668
[5/4000][0/600] Loss_D: 0.1872 Loss_G: 4.1383 D(x): 0.9201 D(G(z)): 0.0448 / 0.0333
[5/4000][100/600] Loss_D: 0.0926 Loss_G: 3.4749 D(x): 0.9769 D(G(z)): 0.0594 / 0.0451
[5/4000][200/600] Loss_D: 0.1370 Loss_G: 4.0923 D(x): 0.9674 D(G(z)): 0.0683 / 0.0593
[5/4000][300/600] Loss_D: 0.1363 Loss_G: 3.6710 D(x): 0.9584 D(G(z)): 0.0581 / 0.0499
[5/4000][400/600] Loss_D: 0.2072 Loss_G: 4.0631 D(x): 0.9341 D(G(z)): 0.0665 / 0.0549
[5/4000][500/600] Loss_D: 0.1829 Loss_G: 3.3004 D(x): 0.9182 D(G(z)): 0.0659 / 0.0635
[6/4000][0/600] Loss_D: 0.1689 Loss_G: 4.0778 D(x): 0.9482 D(G(z)): 0.0543 / 0.0391
[6/4000][100/600] Loss_D: 0.1177 Loss_G: 4.1545 D(x): 0.9681 D(G(z)): 0.0489 / 0.0396
[6/4000][200/600] Loss_D: 0.1109 Loss_G: 4.1085 D(x): 0.9611 D(G(z)): 0.0460 / 0.0387
[6/4000][300/600] Loss_D: 0.1258 Loss_G: 4.1783 D(x): 0.9515 D(G(z)): 0.0444 / 0.0341
[6/4000][400/600] Loss_D: 0.1103 Loss_G: 3.8319 D(x): 0.9586 D(G(z)): 0.0526 / 0.0406
[6/4000][500/600] Loss_D: 0.2525 Loss_G: 3.5775 D(x): 0.9066 D(G(z)): 0.0756 / 0.0687
[7/4000][0/600] Loss_D: 0.2865 Loss_G: 4.0141 D(x): 0.8872 D(G(z)): 0.0520 / 0.0465
[7/4000][100/600] Loss_D: 0.1952 Loss_G: 4.2610 D(x): 0.9499 D(G(z)): 0.0471 / 0.0412
[7/4000][200/600] Loss_D: 0.1716 Loss_G: 3.5190 D(x): 0.9519 D(G(z)): 0.0786 / 0.0525
[7/4000][300/600] Loss_D: 0.1748 Loss_G: 3.7782 D(x): 0.9414 D(G(z)): 0.0426 / 0.0356
[7/4000][400/600] Loss_D: 0.1249 Loss_G: 3.8327 D(x): 0.9622 D(G(z)): 0.0571 / 0.0494
[7/4000][500/600] Loss_D: 0.1273 Loss_G: 3.3972 D(x): 0.9455 D(G(z)): 0.0481 / 0.0446
[8/4000][0/600] Loss_D: 0.1458 Loss_G: 3.9861 D(x): 0.9501 D(G(z)): 0.0469 / 0.0419
[8/4000][100/600] Loss_D: 0.1197 Loss_G: 4.4349 D(x): 0.9507 D(G(z)): 0.0397 / 0.0351
[8/4000][200/600] Loss_D: 0.1258 Loss_G: 4.1831 D(x): 0.9646 D(G(z)): 0.0519 / 0.0398
[8/4000][300/600] Loss_D: 0.1578 Loss_G: 4.1769 D(x): 0.9509 D(G(z)): 0.0567 / 0.0403
[8/4000][400/600] Loss_D: 0.1303 Loss_G: 4.0567 D(x): 0.9570 D(G(z)): 0.0530 / 0.0427
[8/4000][500/600] Loss_D: 0.2715 Loss_G: 3.5448 D(x): 0.9246 D(G(z)): 0.0762 / 0.0803
[9/4000][0/600] Loss_D: 0.2412 Loss_G: 4.7126 D(x): 0.9081 D(G(z)): 0.0354 / 0.0330
[9/4000][100/600] Loss_D: 0.2192 Loss_G: 4.2275 D(x): 0.9219 D(G(z)): 0.0290 / 0.0309
[9/4000][200/600] Loss_D: 0.1545 Loss_G: 4.0230 D(x): 0.9366 D(G(z)): 0.0461 / 0.0353
[9/4000][300/600] Loss_D: 0.1920 Loss_G: 3.9555 D(x): 0.9337 D(G(z)): 0.0651 / 0.0471
[9/4000][400/600] Loss_D: 0.1427 Loss_G: 4.2804 D(x): 0.9538 D(G(z)): 0.0376 / 0.0327
[9/4000][500/600] Loss_D: 0.1630 Loss_G: 3.5189 D(x): 0.9534 D(G(z)): 0.0701 / 0.0590
[10/4000][0/600] Loss_D: 0.2078 Loss_G: 4.5319 D(x): 0.9175 D(G(z)): 0.0244 / 0.0219
[10/4000][100/600] Loss_D: 0.1414 Loss_G: 4.3631 D(x): 0.9310 D(G(z)): 0.0305 / 0.0286
[10/4000][200/600] Loss_D: 0.1640 Loss_G: 4.1148 D(x): 0.9443 D(G(z)): 0.0454 / 0.0344
[10/4000][300/600] Loss_D: 0.1529 Loss_G: 3.9637 D(x): 0.9522 D(G(z)): 0.0508 / 0.0428
[10/4000][400/600] Loss_D: 0.1110 Loss_G: 3.8258 D(x): 0.9714 D(G(z)): 0.0523 / 0.0432
[10/4000][500/600] Loss_D: 0.1774 Loss_G: 3.7332 D(x): 0.9310 D(G(z)): 0.0478 / 0.0520
[11/4000][0/600] Loss_D: 0.2576 Loss_G: 4.1064 D(x): 0.9148 D(G(z)): 0.0321 / 0.0376
[11/4000][100/600] Loss_D: 0.1720 Loss_G: 4.6965 D(x): 0.9319 D(G(z)): 0.0453 / 0.0370
[11/4000][200/600] Loss_D: 0.1852 Loss_G: 4.8085 D(x): 0.9413 D(G(z)): 0.0477 / 0.0342
[11/4000][300/600] Loss_D: 0.2178 Loss_G: 4.7535 D(x): 0.9301 D(G(z)): 0.0419 / 0.0332
[11/4000][400/600] Loss_D: 0.1047 Loss_G: 4.3306 D(x): 0.9569 D(G(z)): 0.0314 / 0.0267
[11/4000][500/600] Loss_D: 0.3046 Loss_G: 3.4021 D(x): 0.9160 D(G(z)): 0.0936 / 0.0899
[12/4000][0/600] Loss_D: 0.2531 Loss_G: 4.7488 D(x): 0.9221 D(G(z)): 0.0402 / 0.0378
[12/4000][100/600] Loss_D: 0.1239 Loss_G: 4.2564 D(x): 0.9577 D(G(z)): 0.0452 / 0.0361
[12/4000][200/600] Loss_D: 0.1882 Loss_G: 4.2550 D(x): 0.9200 D(G(z)): 0.0373 / 0.0286
[12/4000][300/600] Loss_D: 0.1465 Loss_G: 4.4029 D(x): 0.9436 D(G(z)): 0.0445 / 0.0345
[12/4000][400/600] Loss_D: 0.1528 Loss_G: 5.1419 D(x): 0.9562 D(G(z)): 0.0408 / 0.0257
[12/4000][500/600] Loss_D: 0.1873 Loss_G: 3.8282 D(x): 0.9376 D(G(z)): 0.0502 / 0.0480
[13/4000][0/600] Loss_D: 0.2298 Loss_G: 4.9067 D(x): 0.9253 D(G(z)): 0.0280 / 0.0282
[13/4000][100/600] Loss_D: 0.0710 Loss_G: 5.0316 D(x): 0.9749 D(G(z)): 0.0233 / 0.0190
[13/4000][200/600] Loss_D: 0.0947 Loss_G: 4.3583 D(x): 0.9596 D(G(z)): 0.0381 / 0.0266
[13/4000][300/600] Loss_D: 0.1968 Loss_G: 4.8715 D(x): 0.9360 D(G(z)): 0.0498 / 0.0328
[13/4000][400/600] Loss_D: 0.1804 Loss_G: 4.4136 D(x): 0.9358 D(G(z)): 0.0386 / 0.0271
[13/4000][500/600] Loss_D: 0.3379 Loss_G: 3.5644 D(x): 0.9285 D(G(z)): 0.0920 / 0.0958
[14/4000][0/600] Loss_D: 0.1777 Loss_G: 4.7153 D(x): 0.9436 D(G(z)): 0.0356 / 0.0227
[14/4000][100/600] Loss_D: 0.1163 Loss_G: 4.1540 D(x): 0.9521 D(G(z)): 0.0390 / 0.0339
[14/4000][200/600] Loss_D: 0.2591 Loss_G: 5.5598 D(x): 0.9160 D(G(z)): 0.0317 / 0.0255
[14/4000][300/600] Loss_D: 0.3559 Loss_G: 4.0681 D(x): 0.9081 D(G(z)): 0.0763 / 0.0606
[14/4000][400/600] Loss_D: 0.3546 Loss_G: 3.6916 D(x): 0.8991 D(G(z)): 0.0591 / 0.0592
[14/4000][500/600] Loss_D: 0.2818 Loss_G: 3.2939 D(x): 0.9179 D(G(z)): 0.0762 / 0.0752
[15/4000][0/600] Loss_D: 0.2752 Loss_G: 4.3120 D(x): 0.9116 D(G(z)): 0.0562 / 0.0409
[15/4000][100/600] Loss_D: 0.2013 Loss_G: 4.3576 D(x): 0.9291 D(G(z)): 0.0483 / 0.0391
[15/4000][200/600] Loss_D: 0.2562 Loss_G: 4.7551 D(x): 0.9246 D(G(z)): 0.0572 / 0.0337
[15/4000][300/600] Loss_D: 0.2374 Loss_G: 4.1621 D(x): 0.9384 D(G(z)): 0.0629 / 0.0382
[15/4000][400/600] Loss_D: 0.2246 Loss_G: 4.2673 D(x): 0.9579 D(G(z)): 0.0739 / 0.0595
[15/4000][500/600] Loss_D: 0.4626 Loss_G: 3.5991 D(x): 0.9029 D(G(z)): 0.0950 / 0.0830
[16/4000][0/600] Loss_D: 0.3111 Loss_G: 4.6631 D(x): 0.9173 D(G(z)): 0.0490 / 0.0298
[16/4000][100/600] Loss_D: 0.2168 Loss_G: 3.9748 D(x): 0.9185 D(G(z)): 0.0514 / 0.0434
[16/4000][200/600] Loss_D: 0.2139 Loss_G: 3.4214 D(x): 0.9595 D(G(z)): 0.0956 / 0.0674
[16/4000][300/600] Loss_D: 0.2179 Loss_G: 3.9391 D(x): 0.9377 D(G(z)): 0.0506 / 0.0394
[16/4000][400/600] Loss_D: 0.1874 Loss_G: 4.2472 D(x): 0.9392 D(G(z)): 0.0703 / 0.0507
[16/4000][500/600] Loss_D: 0.3276 Loss_G: 3.8257 D(x): 0.8834 D(G(z)): 0.0516 / 0.0435
[17/4000][0/600] Loss_D: 0.3193 Loss_G: 3.6067 D(x): 0.8978 D(G(z)): 0.0738 / 0.0794
[17/4000][100/600] Loss_D: 0.1693 Loss_G: 4.2673 D(x): 0.9341 D(G(z)): 0.0389 / 0.0303
[17/4000][200/600] Loss_D: 0.1585 Loss_G: 3.8715 D(x): 0.9612 D(G(z)): 0.0853 / 0.0533
[17/4000][300/600] Loss_D: 0.1994 Loss_G: 3.8724 D(x): 0.9556 D(G(z)): 0.0816 / 0.0594
[17/4000][400/600] Loss_D: 0.1796 Loss_G: 4.1420 D(x): 0.9452 D(G(z)): 0.0425 / 0.0359
[17/4000][500/600] Loss_D: 0.3352 Loss_G: 3.5258 D(x): 0.8856 D(G(z)): 0.0650 / 0.0639
[18/4000][0/600] Loss_D: 0.2993 Loss_G: 3.6599 D(x): 0.8930 D(G(z)): 0.0524 / 0.0626
[18/4000][100/600] Loss_D: 0.2067 Loss_G: 3.8125 D(x): 0.9401 D(G(z)): 0.0653 / 0.0549
[18/4000][200/600] Loss_D: 0.2984 Loss_G: 3.4401 D(x): 0.9277 D(G(z)): 0.1147 / 0.0717
[18/4000][300/600] Loss_D: 0.2328 Loss_G: 4.0425 D(x): 0.9308 D(G(z)): 0.0649 / 0.0465
[18/4000][400/600] Loss_D: 0.2446 Loss_G: 4.3853 D(x): 0.9200 D(G(z)): 0.0509 / 0.0433
[18/4000][500/600] Loss_D: 0.5304 Loss_G: 3.0126 D(x): 0.8619 D(G(z)): 0.0913 / 0.1037
[19/4000][0/600] Loss_D: 0.2525 Loss_G: 4.2234 D(x): 0.9136 D(G(z)): 0.0461 / 0.0458
[19/4000][100/600] Loss_D: 0.1537 Loss_G: 3.6100 D(x): 0.9645 D(G(z)): 0.0755 / 0.0653
[19/4000][200/600] Loss_D: 0.2696 Loss_G: 3.7655 D(x): 0.9161 D(G(z)): 0.0591 / 0.0458
[19/4000][300/600] Loss_D: 0.2362 Loss_G: 3.4384 D(x): 0.9392 D(G(z)): 0.0865 / 0.0669
[19/4000][400/600] Loss_D: 0.2464 Loss_G: 3.6067 D(x): 0.9471 D(G(z)): 0.0912 / 0.0660
[19/4000][500/600] Loss_D: 0.4628 Loss_G: 3.6507 D(x): 0.8545 D(G(z)): 0.0814 / 0.0775
[20/4000][0/600] Loss_D: 0.1629 Loss_G: 4.6087 D(x): 0.9358 D(G(z)): 0.0181 / 0.0163
[20/4000][100/600] Loss_D: 0.1515 Loss_G: 4.2801 D(x): 0.9527 D(G(z)): 0.0414 / 0.0419
[20/4000][200/600] Loss_D: 0.3508 Loss_G: 4.1175 D(x): 0.9167 D(G(z)): 0.0879 / 0.0680
[20/4000][300/600] Loss_D: 0.3195 Loss_G: 3.8492 D(x): 0.9416 D(G(z)): 0.1345 / 0.0668
[20/4000][400/600] Loss_D: 0.2248 Loss_G: 3.9864 D(x): 0.9204 D(G(z)): 0.0435 / 0.0352
[20/4000][500/600] Loss_D: 0.4586 Loss_G: 4.1386 D(x): 0.8336 D(G(z)): 0.0448 / 0.0434
[21/4000][0/600] Loss_D: 0.3108 Loss_G: 4.1034 D(x): 0.8784 D(G(z)): 0.0665 / 0.0522
[21/4000][100/600] Loss_D: 0.1553 Loss_G: 3.6249 D(x): 0.9511 D(G(z)): 0.0674 / 0.0559
[21/4000][200/600] Loss_D: 0.2164 Loss_G: 3.5105 D(x): 0.9213 D(G(z)): 0.0641 / 0.0493
[21/4000][300/600] Loss_D: 0.2931 Loss_G: 3.8814 D(x): 0.8858 D(G(z)): 0.0550 / 0.0433
[21/4000][400/600] Loss_D: 0.3122 Loss_G: 3.0501 D(x): 0.9325 D(G(z)): 0.1283 / 0.0881
[21/4000][500/600] Loss_D: 0.5186 Loss_G: 2.8425 D(x): 0.8799 D(G(z)): 0.1442 / 0.1279
[22/4000][0/600] Loss_D: 0.2672 Loss_G: 4.0608 D(x): 0.9172 D(G(z)): 0.0529 / 0.0569
[22/4000][100/600] Loss_D: 0.2714 Loss_G: 3.5175 D(x): 0.9327 D(G(z)): 0.0766 / 0.0647
[22/4000][200/600] Loss_D: 0.3293 Loss_G: 3.2239 D(x): 0.9096 D(G(z)): 0.1155 / 0.0869
[22/4000][300/600] Loss_D: 0.3881 Loss_G: 3.0671 D(x): 0.9063 D(G(z)): 0.1223 / 0.0842
[22/4000][400/600] Loss_D: 0.2571 Loss_G: 4.0376 D(x): 0.9172 D(G(z)): 0.0508 / 0.0429
[22/4000][500/600] Loss_D: 0.4496 Loss_G: 3.3256 D(x): 0.8475 D(G(z)): 0.0816 / 0.0797
[23/4000][0/600] Loss_D: 0.3685 Loss_G: 3.7780 D(x): 0.8562 D(G(z)): 0.0592 / 0.0648
[23/4000][100/600] Loss_D: 0.3512 Loss_G: 3.2319 D(x): 0.9423 D(G(z)): 0.1368 / 0.0922
[23/4000][200/600] Loss_D: 0.2904 Loss_G: 3.7348 D(x): 0.9168 D(G(z)): 0.0638 / 0.0578
[23/4000][300/600] Loss_D: 0.3401 Loss_G: 3.8631 D(x): 0.8895 D(G(z)): 0.0703 / 0.0537
[23/4000][400/600] Loss_D: 0.3412 Loss_G: 3.3387 D(x): 0.9109 D(G(z)): 0.0909 / 0.0725
[23/4000][500/600] Loss_D: 0.4790 Loss_G: 3.1724 D(x): 0.8428 D(G(z)): 0.0993 / 0.0843
[24/4000][0/600] Loss_D: 0.2664 Loss_G: 3.1316 D(x): 0.9112 D(G(z)): 0.0738 / 0.0830
[24/4000][100/600] Loss_D: 0.2431 Loss_G: 3.5844 D(x): 0.9297 D(G(z)): 0.0712 / 0.0687
[24/4000][200/600] Loss_D: 0.2188 Loss_G: 3.7185 D(x): 0.9392 D(G(z)): 0.0690 / 0.0525
[24/4000][300/600] Loss_D: 0.2784 Loss_G: 3.7379 D(x): 0.8974 D(G(z)): 0.0636 / 0.0462
[24/4000][400/600] Loss_D: 0.2626 Loss_G: 3.3592 D(x): 0.9225 D(G(z)): 0.0925 / 0.0760
[24/4000][500/600] Loss_D: 0.5501 Loss_G: 3.2116 D(x): 0.8518 D(G(z)): 0.1171 / 0.1006
[25/4000][0/600] Loss_D: 0.3254 Loss_G: 3.0567 D(x): 0.8994 D(G(z)): 0.1202 / 0.1016
[25/4000][100/600] Loss_D: 0.3416 Loss_G: 3.2383 D(x): 0.9001 D(G(z)): 0.0866 / 0.0839
[25/4000][200/600] Loss_D: 0.3489 Loss_G: 3.3327 D(x): 0.9059 D(G(z)): 0.0971 / 0.0766
[25/4000][300/600] Loss_D: 0.3903 Loss_G: 3.7293 D(x): 0.9059 D(G(z)): 0.1345 / 0.0894
[25/4000][400/600] Loss_D: 0.3488 Loss_G: 3.5247 D(x): 0.8811 D(G(z)): 0.0859 / 0.0755
[25/4000][500/600] Loss_D: 0.5703 Loss_G: 2.8312 D(x): 0.8467 D(G(z)): 0.1355 / 0.1146
[26/4000][0/600] Loss_D: 0.3349 Loss_G: 3.0121 D(x): 0.8927 D(G(z)): 0.0989 / 0.0969
[26/4000][100/600] Loss_D: 0.2905 Loss_G: 3.1034 D(x): 0.9185 D(G(z)): 0.1044 / 0.0921
[26/4000][200/600] Loss_D: 0.2681 Loss_G: 3.3595 D(x): 0.9303 D(G(z)): 0.0972 / 0.0765
[26/4000][300/600] Loss_D: 0.2435 Loss_G: 3.6810 D(x): 0.9062 D(G(z)): 0.0617 / 0.0446
[26/4000][400/600] Loss_D: 0.4153 Loss_G: 2.7159 D(x): 0.8971 D(G(z)): 0.1360 / 0.1094
[26/4000][500/600] Loss_D: 0.4654 Loss_G: 2.9271 D(x): 0.8640 D(G(z)): 0.1168 / 0.1037
[27/4000][0/600] Loss_D: 0.4603 Loss_G: 2.9726 D(x): 0.8576 D(G(z)): 0.0968 / 0.0978
[27/4000][100/600] Loss_D: 0.2128 Loss_G: 3.4659 D(x): 0.9187 D(G(z)): 0.0720 / 0.0581
[27/4000][200/600] Loss_D: 0.3047 Loss_G: 3.6047 D(x): 0.8979 D(G(z)): 0.0689 / 0.0565
[27/4000][300/600] Loss_D: 0.3390 Loss_G: 3.2369 D(x): 0.8947 D(G(z)): 0.0797 / 0.0643
[27/4000][400/600] Loss_D: 0.4008 Loss_G: 3.1743 D(x): 0.8817 D(G(z)): 0.1094 / 0.0873
[27/4000][500/600] Loss_D: 0.5823 Loss_G: 2.7160 D(x): 0.8262 D(G(z)): 0.1475 / 0.1381
[28/4000][0/600] Loss_D: 0.4098 Loss_G: 2.9511 D(x): 0.8831 D(G(z)): 0.1056 / 0.1196
[28/4000][100/600] Loss_D: 0.2416 Loss_G: 3.0887 D(x): 0.9285 D(G(z)): 0.0955 / 0.0856
[28/4000][200/600] Loss_D: 0.3193 Loss_G: 3.7503 D(x): 0.9047 D(G(z)): 0.0787 / 0.0560
[28/4000][300/600] Loss_D: 0.3543 Loss_G: 3.3483 D(x): 0.9160 D(G(z)): 0.1244 / 0.0915
[28/4000][400/600] Loss_D: 0.3479 Loss_G: 3.3682 D(x): 0.8856 D(G(z)): 0.0864 / 0.0731
[28/4000][500/600] Loss_D: 0.3734 Loss_G: 3.0412 D(x): 0.8797 D(G(z)): 0.1228 / 0.1073
[29/4000][0/600] Loss_D: 0.4223 Loss_G: 3.1224 D(x): 0.8473 D(G(z)): 0.0978 / 0.1093
[29/4000][100/600] Loss_D: 0.3389 Loss_G: 3.2174 D(x): 0.9020 D(G(z)): 0.1137 / 0.0967
[29/4000][200/600] Loss_D: 0.3523 Loss_G: 2.8876 D(x): 0.9303 D(G(z)): 0.1591 / 0.1074
[29/4000][300/600] Loss_D: 0.3224 Loss_G: 3.2518 D(x): 0.9149 D(G(z)): 0.1055 / 0.0795
[29/4000][400/600] Loss_D: 0.4500 Loss_G: 2.9201 D(x): 0.8767 D(G(z)): 0.1417 / 0.1212
[29/4000][500/600] Loss_D: 0.5733 Loss_G: 2.9819 D(x): 0.8342 D(G(z)): 0.1219 / 0.1121
[30/4000][0/600] Loss_D: 0.5039 Loss_G: 3.4335 D(x): 0.8310 D(G(z)): 0.0894 / 0.0703
[30/4000][100/600] Loss_D: 0.3263 Loss_G: 2.7857 D(x): 0.8975 D(G(z)): 0.1223 / 0.1275
[30/4000][200/600] Loss_D: 0.3062 Loss_G: 3.1262 D(x): 0.9070 D(G(z)): 0.1088 / 0.0862
[30/4000][300/600] Loss_D: 0.4748 Loss_G: 3.3045 D(x): 0.8465 D(G(z)): 0.0973 / 0.0799
[30/4000][400/600] Loss_D: 0.4836 Loss_G: 2.8173 D(x): 0.8477 D(G(z)): 0.1470 / 0.1173
[30/4000][500/600] Loss_D: 0.5499 Loss_G: 3.2664 D(x): 0.7927 D(G(z)): 0.1016 / 0.0923
[31/4000][0/600] Loss_D: 0.5268 Loss_G: 2.6378 D(x): 0.8453 D(G(z)): 0.1481 / 0.1407
[31/4000][100/600] Loss_D: 0.4094 Loss_G: 2.9911 D(x): 0.8749 D(G(z)): 0.1248 / 0.1031
[31/4000][200/600] Loss_D: 0.3267 Loss_G: 3.1047 D(x): 0.9156 D(G(z)): 0.1134 / 0.0898
[31/4000][300/600] Loss_D: 0.3722 Loss_G: 2.9303 D(x): 0.8947 D(G(z)): 0.1177 / 0.0980
[31/4000][400/600] Loss_D: 0.3960 Loss_G: 2.7605 D(x): 0.8711 D(G(z)): 0.1406 / 0.1156
[31/4000][500/600] Loss_D: 0.4482 Loss_G: 3.0086 D(x): 0.8355 D(G(z)): 0.1195 / 0.1058
[32/4000][0/600] Loss_D: 0.4014 Loss_G: 2.7634 D(x): 0.8808 D(G(z)): 0.1210 / 0.1173
[32/4000][100/600] Loss_D: 0.4384 Loss_G: 2.5910 D(x): 0.8899 D(G(z)): 0.1768 / 0.1514
[32/4000][200/600] Loss_D: 0.4307 Loss_G: 2.6906 D(x): 0.9041 D(G(z)): 0.1673 / 0.1269
[32/4000][300/600] Loss_D: 0.5853 Loss_G: 2.8867 D(x): 0.8435 D(G(z)): 0.1299 / 0.1147
[32/4000][400/600] Loss_D: 0.4945 Loss_G: 3.2311 D(x): 0.8245 D(G(z)): 0.0995 / 0.0867
[32/4000][500/600] Loss_D: 0.4423 Loss_G: 2.5647 D(x): 0.8469 D(G(z)): 0.1418 / 0.1407
[33/4000][0/600] Loss_D: 0.3679 Loss_G: 2.7405 D(x): 0.8932 D(G(z)): 0.1144 / 0.1143
[33/4000][100/600] Loss_D: 0.3952 Loss_G: 3.1117 D(x): 0.8619 D(G(z)): 0.1011 / 0.0919
[33/4000][200/600] Loss_D: 0.3674 Loss_G: 3.2870 D(x): 0.9140 D(G(z)): 0.1327 / 0.0875
[33/4000][300/600] Loss_D: 0.4402 Loss_G: 2.7176 D(x): 0.8792 D(G(z)): 0.1466 / 0.1361
[33/4000][400/600] Loss_D: 0.4532 Loss_G: 3.2229 D(x): 0.8380 D(G(z)): 0.1106 / 0.0855
[33/4000][500/600] Loss_D: 0.5484 Loss_G: 2.8585 D(x): 0.8163 D(G(z)): 0.1146 / 0.1138
[34/4000][0/600] Loss_D: 0.3432 Loss_G: 2.8364 D(x): 0.8923 D(G(z)): 0.1098 / 0.1112
[34/4000][100/600] Loss_D: 0.3243 Loss_G: 3.0665 D(x): 0.8924 D(G(z)): 0.0920 / 0.0841
[34/4000][200/600] Loss_D: 0.3949 Loss_G: 2.9613 D(x): 0.9229 D(G(z)): 0.1408 / 0.1038
[34/4000][300/600] Loss_D: 0.3324 Loss_G: 3.3682 D(x): 0.8978 D(G(z)): 0.1164 / 0.0890
[34/4000][400/600] Loss_D: 0.5558 Loss_G: 3.0174 D(x): 0.8005 D(G(z)): 0.1108 / 0.0943
[34/4000][500/600] Loss_D: 0.3738 Loss_G: 2.9809 D(x): 0.8859 D(G(z)): 0.1301 / 0.1147
[35/4000][0/600] Loss_D: 0.4338 Loss_G: 2.5555 D(x): 0.8839 D(G(z)): 0.1401 / 0.1482
[35/4000][100/600] Loss_D: 0.4277 Loss_G: 3.1262 D(x): 0.8520 D(G(z)): 0.0724 / 0.0807
[35/4000][200/600] Loss_D: 0.2929 Loss_G: 3.5806 D(x): 0.9279 D(G(z)): 0.0781 / 0.0562
[35/4000][300/600] Loss_D: 0.3460 Loss_G: 3.1369 D(x): 0.8975 D(G(z)): 0.1254 / 0.0979
[35/4000][400/600] Loss_D: 0.4706 Loss_G: 2.9849 D(x): 0.8623 D(G(z)): 0.1423 / 0.1066
[35/4000][500/600] Loss_D: 0.5318 Loss_G: 2.9824 D(x): 0.8410 D(G(z)): 0.1139 / 0.1144
[36/4000][0/600] Loss_D: 0.5463 Loss_G: 2.4039 D(x): 0.8375 D(G(z)): 0.1613 / 0.1645
[36/4000][100/600] Loss_D: 0.4763 Loss_G: 2.7185 D(x): 0.8498 D(G(z)): 0.1226 / 0.1234
[36/4000][200/600] Loss_D: 0.3315 Loss_G: 2.9743 D(x): 0.9252 D(G(z)): 0.1245 / 0.1003
[36/4000][300/600] Loss_D: 0.5778 Loss_G: 2.7783 D(x): 0.8345 D(G(z)): 0.1499 / 0.1376
[36/4000][400/600] Loss_D: 0.5249 Loss_G: 2.6603 D(x): 0.8384 D(G(z)): 0.1552 / 0.1247
[36/4000][500/600] Loss_D: 0.5179 Loss_G: 2.7213 D(x): 0.8213 D(G(z)): 0.1152 / 0.1146
[37/4000][0/600] Loss_D: 0.4311 Loss_G: 2.5007 D(x): 0.8776 D(G(z)): 0.1624 / 0.1396
[37/4000][100/600] Loss_D: 0.4038 Loss_G: 2.7086 D(x): 0.8990 D(G(z)): 0.1530 / 0.1378
[37/4000][200/600] Loss_D: 0.3413 Loss_G: 3.0976 D(x): 0.9072 D(G(z)): 0.1122 / 0.0898
[37/4000][300/600] Loss_D: 0.4051 Loss_G: 2.9414 D(x): 0.8676 D(G(z)): 0.1231 / 0.1123
[37/4000][400/600] Loss_D: 0.4972 Loss_G: 2.3394 D(x): 0.8590 D(G(z)): 0.1752 / 0.1596
[37/4000][500/600] Loss_D: 0.4950 Loss_G: 2.7623 D(x): 0.8390 D(G(z)): 0.1336 / 0.1255
[38/4000][0/600] Loss_D: 0.4511 Loss_G: 2.4483 D(x): 0.8846 D(G(z)): 0.1693 / 0.1627
[38/4000][100/600] Loss_D: 0.3730 Loss_G: 3.0708 D(x): 0.8593 D(G(z)): 0.1084 / 0.0942
[38/4000][200/600] Loss_D: 0.4048 Loss_G: 3.0875 D(x): 0.8973 D(G(z)): 0.1296 / 0.1005
[38/4000][300/600] Loss_D: 0.3753 Loss_G: 2.8051 D(x): 0.8873 D(G(z)): 0.1273 / 0.1025
[38/4000][400/600] Loss_D: 0.5493 Loss_G: 2.9082 D(x): 0.8469 D(G(z)): 0.1335 / 0.1083
[38/4000][500/600] Loss_D: 0.5097 Loss_G: 3.0368 D(x): 0.8206 D(G(z)): 0.1103 / 0.0891
[39/4000][0/600] Loss_D: 0.5992 Loss_G: 2.4516 D(x): 0.8368 D(G(z)): 0.1892 / 0.1726
[39/4000][100/600] Loss_D: 0.4956 Loss_G: 2.8492 D(x): 0.8493 D(G(z)): 0.1276 / 0.1260
[39/4000][200/600] Loss_D: 0.2719 Loss_G: 3.6851 D(x): 0.9090 D(G(z)): 0.0784 / 0.0481
[39/4000][300/600] Loss_D: 0.3301 Loss_G: 3.1010 D(x): 0.8979 D(G(z)): 0.1155 / 0.0995
[39/4000][400/600] Loss_D: 0.6371 Loss_G: 2.6012 D(x): 0.8029 D(G(z)): 0.1675 / 0.1424
[39/4000][500/600] Loss_D: 0.5005 Loss_G: 2.9475 D(x): 0.8226 D(G(z)): 0.1069 / 0.0924
[40/4000][0/600] Loss_D: 0.4994 Loss_G: 2.6995 D(x): 0.8618 D(G(z)): 0.1767 / 0.1571
[40/4000][100/600] Loss_D: 0.3329 Loss_G: 2.8434 D(x): 0.8977 D(G(z)): 0.1348 / 0.1120
[40/4000][200/600] Loss_D: 0.3004 Loss_G: 3.2682 D(x): 0.9150 D(G(z)): 0.1006 / 0.0767
[40/4000][300/600] Loss_D: 0.4380 Loss_G: 2.6162 D(x): 0.8902 D(G(z)): 0.1659 / 0.1261
[40/4000][400/600] Loss_D: 0.6308 Loss_G: 2.7816 D(x): 0.7711 D(G(z)): 0.1205 / 0.1282
[40/4000][500/600] Loss_D: 0.6276 Loss_G: 2.8023 D(x): 0.7953 D(G(z)): 0.1251 / 0.1159
[41/4000][0/600] Loss_D: 0.4051 Loss_G: 2.7135 D(x): 0.8850 D(G(z)): 0.1449 / 0.1331
[41/4000][100/600] Loss_D: 0.3316 Loss_G: 2.7324 D(x): 0.8885 D(G(z)): 0.1266 / 0.1230
[41/4000][200/600] Loss_D: 0.3713 Loss_G: 2.9246 D(x): 0.8849 D(G(z)): 0.1185 / 0.0993
[41/4000][300/600] Loss_D: 0.4939 Loss_G: 2.7866 D(x): 0.8546 D(G(z)): 0.1538 / 0.1198
[41/4000][400/600] Loss_D: 0.4108 Loss_G: 3.0407 D(x): 0.8592 D(G(z)): 0.1195 / 0.0991
[41/4000][500/600] Loss_D: 0.5167 Loss_G: 3.0142 D(x): 0.8179 D(G(z)): 0.1139 / 0.1093
[42/4000][0/600] Loss_D: 0.4781 Loss_G: 2.5885 D(x): 0.8528 D(G(z)): 0.1568 / 0.1389
[42/4000][100/600] Loss_D: 0.3734 Loss_G: 2.6368 D(x): 0.8998 D(G(z)): 0.1550 / 0.1469
[42/4000][200/600] Loss_D: 0.3057 Loss_G: 2.8751 D(x): 0.9152 D(G(z)): 0.1281 / 0.1055
[42/4000][300/600] Loss_D: 0.3831 Loss_G: 2.8361 D(x): 0.8844 D(G(z)): 0.1494 / 0.1162
[42/4000][400/600] Loss_D: 0.5316 Loss_G: 2.5755 D(x): 0.8594 D(G(z)): 0.1737 / 0.1616
[42/4000][500/600] Loss_D: 0.4667 Loss_G: 3.1803 D(x): 0.8195 D(G(z)): 0.0851 / 0.0736
[43/4000][0/600] Loss_D: 0.4168 Loss_G: 2.8234 D(x): 0.8782 D(G(z)): 0.1194 / 0.1213
[43/4000][100/600] Loss_D: 0.4122 Loss_G: 2.7923 D(x): 0.8629 D(G(z)): 0.1194 / 0.1130
[43/4000][200/600] Loss_D: 0.3148 Loss_G: 2.9030 D(x): 0.9227 D(G(z)): 0.1329 / 0.1075
[43/4000][300/600] Loss_D: 0.3429 Loss_G: 3.1405 D(x): 0.8784 D(G(z)): 0.1133 / 0.0945
[43/4000][400/600] Loss_D: 0.4876 Loss_G: 2.5807 D(x): 0.8539 D(G(z)): 0.1797 / 0.1454
[43/4000][500/600] Loss_D: 0.4095 Loss_G: 2.9528 D(x): 0.8500 D(G(z)): 0.1115 / 0.1031
[44/4000][0/600] Loss_D: 0.5267 Loss_G: 2.3988 D(x): 0.8696 D(G(z)): 0.2120 / 0.1939
[44/4000][100/600] Loss_D: 0.4725 Loss_G: 2.8151 D(x): 0.8683 D(G(z)): 0.1364 / 0.1060
[44/4000][200/600] Loss_D: 0.2462 Loss_G: 3.1103 D(x): 0.9373 D(G(z)): 0.1135 / 0.0783
[44/4000][300/600] Loss_D: 0.4446 Loss_G: 2.7483 D(x): 0.8540 D(G(z)): 0.1423 / 0.1151
[44/4000][400/600] Loss_D: 0.5856 Loss_G: 2.6272 D(x): 0.8211 D(G(z)): 0.1850 / 0.1472
[44/4000][500/600] Loss_D: 0.5436 Loss_G: 2.6623 D(x): 0.8221 D(G(z)): 0.1265 / 0.1198
[45/4000][0/600] Loss_D: 0.4744 Loss_G: 2.8802 D(x): 0.8534 D(G(z)): 0.1400 / 0.1347
[45/4000][100/600] Loss_D: 0.5077 Loss_G: 2.5464 D(x): 0.8605 D(G(z)): 0.1698 / 0.1596
[45/4000][200/600] Loss_D: 0.2803 Loss_G: 3.2791 D(x): 0.9104 D(G(z)): 0.0964 / 0.0752
[45/4000][300/600] Loss_D: 0.3431 Loss_G: 3.1680 D(x): 0.9119 D(G(z)): 0.1306 / 0.0918
[45/4000][400/600] Loss_D: 0.4404 Loss_G: 2.8032 D(x): 0.8483 D(G(z)): 0.1229 / 0.1087
[45/4000][500/600] Loss_D: 0.4202 Loss_G: 3.1751 D(x): 0.8310 D(G(z)): 0.0932 / 0.0881
[46/4000][0/600] Loss_D: 0.4831 Loss_G: 3.2765 D(x): 0.8410 D(G(z)): 0.0906 / 0.0879
[46/4000][100/600] Loss_D: 0.3768 Loss_G: 3.0423 D(x): 0.8859 D(G(z)): 0.1081 / 0.0931
[46/4000][200/600] Loss_D: 0.3640 Loss_G: 2.8558 D(x): 0.9165 D(G(z)): 0.1644 / 0.1224
[46/4000][300/600] Loss_D: 0.3843 Loss_G: 2.6425 D(x): 0.9133 D(G(z)): 0.1671 / 0.1356
[46/4000][400/600] Loss_D: 0.5561 Loss_G: 2.9988 D(x): 0.8377 D(G(z)): 0.1331 / 0.0983
[46/4000][500/600] Loss_D: 0.5790 Loss_G: 2.5949 D(x): 0.8268 D(G(z)): 0.1501 / 0.1359
[47/4000][0/600] Loss_D: 0.3835 Loss_G: 3.0077 D(x): 0.8698 D(G(z)): 0.1212 / 0.1155
[47/4000][100/600] Loss_D: 0.2895 Loss_G: 2.8779 D(x): 0.8958 D(G(z)): 0.1049 / 0.1111
[47/4000][200/600] Loss_D: 0.2815 Loss_G: 3.0944 D(x): 0.9168 D(G(z)): 0.1064 / 0.0803
[47/4000][300/600] Loss_D: 0.3636 Loss_G: 2.6837 D(x): 0.9151 D(G(z)): 0.1636 / 0.1134
[47/4000][400/600] Loss_D: 0.4955 Loss_G: 3.1368 D(x): 0.8299 D(G(z)): 0.1138 / 0.0966
[47/4000][500/600] Loss_D: 0.7079 Loss_G: 2.6287 D(x): 0.7893 D(G(z)): 0.1397 / 0.1323
[48/4000][0/600] Loss_D: 0.6683 Loss_G: 2.2204 D(x): 0.8390 D(G(z)): 0.2382 / 0.2117
[48/4000][100/600] Loss_D: 0.4372 Loss_G: 2.7433 D(x): 0.8690 D(G(z)): 0.1221 / 0.1217
[48/4000][200/600] Loss_D: 0.2916 Loss_G: 3.3190 D(x): 0.9116 D(G(z)): 0.1089 / 0.0736
[48/4000][300/600] Loss_D: 0.4447 Loss_G: 2.5820 D(x): 0.9055 D(G(z)): 0.1916 / 0.1557
[48/4000][400/600] Loss_D: 0.5110 Loss_G: 2.7817 D(x): 0.8397 D(G(z)): 0.1380 / 0.1137
[48/4000][500/600] Loss_D: 0.4582 Loss_G: 2.8895 D(x): 0.8447 D(G(z)): 0.1045 / 0.1043
[49/4000][0/600] Loss_D: 0.4709 Loss_G: 2.8786 D(x): 0.8617 D(G(z)): 0.1580 / 0.1431
[49/4000][100/600] Loss_D: 0.3867 Loss_G: 2.6986 D(x): 0.8875 D(G(z)): 0.1357 / 0.1302
[49/4000][200/600] Loss_D: 0.3329 Loss_G: 2.8842 D(x): 0.9104 D(G(z)): 0.1071 / 0.0928
[49/4000][300/600] Loss_D: 0.4008 Loss_G: 2.7889 D(x): 0.9278 D(G(z)): 0.1913 / 0.1320
[49/4000][400/600] Loss_D: 0.5505 Loss_G: 2.7748 D(x): 0.8434 D(G(z)): 0.1636 / 0.1269
[49/4000][500/600] Loss_D: 0.6032 Loss_G: 2.6608 D(x): 0.8215 D(G(z)): 0.1530 / 0.1455
[50/4000][0/600] Loss_D: 0.4192 Loss_G: 2.5626 D(x): 0.8789 D(G(z)): 0.1594 / 0.1539
[50/4000][100/600] Loss_D: 0.5315 Loss_G: 2.5317 D(x): 0.8382 D(G(z)): 0.1337 / 0.1506
[50/4000][200/600] Loss_D: 0.4153 Loss_G: 2.7624 D(x): 0.9104 D(G(z)): 0.1641 / 0.1292
[50/4000][300/600] Loss_D: 0.4112 Loss_G: 2.6777 D(x): 0.8732 D(G(z)): 0.1621 / 0.1196
[50/4000][400/600] Loss_D: 0.5856 Loss_G: 2.2897 D(x): 0.8302 D(G(z)): 0.1966 / 0.1663
[50/4000][500/600] Loss_D: 0.6029 Loss_G: 2.6110 D(x): 0.7965 D(G(z)): 0.1326 / 0.1270
[51/4000][0/600] Loss_D: 0.5014 Loss_G: 2.7821 D(x): 0.8339 D(G(z)): 0.1264 / 0.1217
[51/4000][100/600] Loss_D: 0.4285 Loss_G: 2.5863 D(x): 0.8484 D(G(z)): 0.1290 / 0.1282
[51/4000][200/600] Loss_D: 0.3343 Loss_G: 2.8013 D(x): 0.9251 D(G(z)): 0.1517 / 0.1194
[51/4000][300/600] Loss_D: 0.4989 Loss_G: 2.5915 D(x): 0.8607 D(G(z)): 0.1615 / 0.1409
[51/4000][400/600] Loss_D: 0.5177 Loss_G: 2.5434 D(x): 0.8347 D(G(z)): 0.1524 / 0.1368
[51/4000][500/600] Loss_D: 0.6804 Loss_G: 2.4331 D(x): 0.7993 D(G(z)): 0.1755 / 0.1734
[52/4000][0/600] Loss_D: 0.4769 Loss_G: 2.5695 D(x): 0.8400 D(G(z)): 0.1566 / 0.1605
[52/4000][100/600] Loss_D: 0.4380 Loss_G: 2.3845 D(x): 0.8707 D(G(z)): 0.1635 / 0.1587
[52/4000][200/600] Loss_D: 0.3575 Loss_G: 3.0943 D(x): 0.9170 D(G(z)): 0.1343 / 0.1148
[52/4000][300/600] Loss_D: 0.5078 Loss_G: 2.6666 D(x): 0.8449 D(G(z)): 0.1639 / 0.1292
[52/4000][400/600] Loss_D: 0.5052 Loss_G: 2.8452 D(x): 0.8537 D(G(z)): 0.1487 / 0.1249
[52/4000][500/600] Loss_D: 0.5867 Loss_G: 2.4491 D(x): 0.8032 D(G(z)): 0.1422 / 0.1365
[53/4000][0/600] Loss_D: 0.5370 Loss_G: 2.6371 D(x): 0.8428 D(G(z)): 0.1460 / 0.1451
[53/4000][100/600] Loss_D: 0.3384 Loss_G: 2.7270 D(x): 0.8952 D(G(z)): 0.1353 / 0.1311
[53/4000][200/600] Loss_D: 0.2745 Loss_G: 2.6709 D(x): 0.9463 D(G(z)): 0.1563 / 0.1149
[53/4000][300/600] Loss_D: 0.4323 Loss_G: 2.7827 D(x): 0.8780 D(G(z)): 0.1448 / 0.1060
[53/4000][400/600] Loss_D: 0.3864 Loss_G: 2.7710 D(x): 0.8600 D(G(z)): 0.1321 / 0.1160
[53/4000][500/600] Loss_D: 0.6218 Loss_G: 2.7149 D(x): 0.7973 D(G(z)): 0.1312 / 0.1283
[54/4000][0/600] Loss_D: 0.4716 Loss_G: 2.4027 D(x): 0.8914 D(G(z)): 0.1840 / 0.1707
[54/4000][100/600] Loss_D: 0.4107 Loss_G: 2.6473 D(x): 0.8878 D(G(z)): 0.1444 / 0.1348
[54/4000][200/600] Loss_D: 0.3736 Loss_G: 3.2090 D(x): 0.8903 D(G(z)): 0.1182 / 0.0912
[54/4000][300/600] Loss_D: 0.3499 Loss_G: 2.7473 D(x): 0.8778 D(G(z)): 0.1410 / 0.1119
[54/4000][400/600] Loss_D: 0.7468 Loss_G: 2.5224 D(x): 0.7508 D(G(z)): 0.1651 / 0.1374
[54/4000][500/600] Loss_D: 0.4387 Loss_G: 2.6927 D(x): 0.8510 D(G(z)): 0.1280 / 0.1255
[55/4000][0/600] Loss_D: 0.3839 Loss_G: 3.0794 D(x): 0.8721 D(G(z)): 0.1195 / 0.0938
[55/4000][100/600] Loss_D: 0.4513 Loss_G: 2.7491 D(x): 0.8731 D(G(z)): 0.1329 / 0.1244
[55/4000][200/600] Loss_D: 0.3906 Loss_G: 3.2697 D(x): 0.8974 D(G(z)): 0.1317 / 0.0988
[55/4000][300/600] Loss_D: 0.4344 Loss_G: 2.7708 D(x): 0.8348 D(G(z)): 0.1132 / 0.0947
[55/4000][400/600] Loss_D: 0.5285 Loss_G: 2.7270 D(x): 0.8164 D(G(z)): 0.1568 / 0.1299
[55/4000][500/600] Loss_D: 0.7035 Loss_G: 2.4832 D(x): 0.7888 D(G(z)): 0.1646 / 0.1554
[56/4000][0/600] Loss_D: 0.4560 Loss_G: 2.3785 D(x): 0.8498 D(G(z)): 0.1589 / 0.1648
[56/4000][100/600] Loss_D: 0.4164 Loss_G: 2.7717 D(x): 0.8625 D(G(z)): 0.1413 / 0.1305
[56/4000][200/600] Loss_D: 0.3785 Loss_G: 2.7683 D(x): 0.9039 D(G(z)): 0.1524 / 0.1146
[56/4000][300/600] Loss_D: 0.4464 Loss_G: 2.6293 D(x): 0.8573 D(G(z)): 0.1483 / 0.1260
[56/4000][400/600] Loss_D: 0.5761 Loss_G: 2.5211 D(x): 0.8031 D(G(z)): 0.1576 / 0.1313
[56/4000][500/600] Loss_D: 0.5556 Loss_G: 2.5619 D(x): 0.8085 D(G(z)): 0.1368 / 0.1391
[57/4000][0/600] Loss_D: 0.4846 Loss_G: 2.0868 D(x): 0.8394 D(G(z)): 0.1795 / 0.1836
[57/4000][100/600] Loss_D: 0.4453 Loss_G: 2.4994 D(x): 0.8720 D(G(z)): 0.1552 / 0.1594
[57/4000][200/600] Loss_D: 0.3214 Loss_G: 3.1942 D(x): 0.9147 D(G(z)): 0.1358 / 0.0918
[57/4000][300/600] Loss_D: 0.4210 Loss_G: 2.8056 D(x): 0.8637 D(G(z)): 0.1394 / 0.1038
[57/4000][400/600] Loss_D: 0.5075 Loss_G: 2.5644 D(x): 0.8390 D(G(z)): 0.1706 / 0.1380
[57/4000][500/600] Loss_D: 0.5144 Loss_G: 2.4813 D(x): 0.8386 D(G(z)): 0.1508 / 0.1457
[58/4000][0/600] Loss_D: 0.3796 Loss_G: 2.7099 D(x): 0.8606 D(G(z)): 0.1314 / 0.1218
[58/4000][100/600] Loss_D: 0.3843 Loss_G: 2.5117 D(x): 0.8670 D(G(z)): 0.1244 / 0.1310
[58/4000][200/600] Loss_D: 0.3856 Loss_G: 2.5736 D(x): 0.9060 D(G(z)): 0.1595 / 0.1264
[58/4000][300/600] Loss_D: 0.4468 Loss_G: 2.5947 D(x): 0.8565 D(G(z)): 0.1595 / 0.1256
[58/4000][400/600] Loss_D: 0.4774 Loss_G: 2.4471 D(x): 0.8494 D(G(z)): 0.1656 / 0.1511
[58/4000][500/600] Loss_D: 0.5354 Loss_G: 2.7060 D(x): 0.8274 D(G(z)): 0.1277 / 0.1220
[59/4000][0/600] Loss_D: 0.4811 Loss_G: 2.2696 D(x): 0.8514 D(G(z)): 0.1628 / 0.1689
[59/4000][100/600] Loss_D: 0.4599 Loss_G: 2.6398 D(x): 0.8361 D(G(z)): 0.1415 / 0.1513
[59/4000][200/600] Loss_D: 0.3088 Loss_G: 3.0505 D(x): 0.8991 D(G(z)): 0.1027 / 0.0805
[59/4000][300/600] Loss_D: 0.4248 Loss_G: 2.9113 D(x): 0.8395 D(G(z)): 0.1237 / 0.0957
[59/4000][400/600] Loss_D: 0.5156 Loss_G: 2.5041 D(x): 0.8500 D(G(z)): 0.1749 / 0.1491
[59/4000][500/600] Loss_D: 0.6033 Loss_G: 2.7486 D(x): 0.7817 D(G(z)): 0.1219 / 0.1168
[60/4000][0/600] Loss_D: 0.4207 Loss_G: 2.3002 D(x): 0.8724 D(G(z)): 0.1736 / 0.1590
[60/4000][100/600] Loss_D: 0.5047 Loss_G: 2.4868 D(x): 0.8293 D(G(z)): 0.1428 / 0.1359
[60/4000][200/600] Loss_D: 0.4172 Loss_G: 2.6311 D(x): 0.9043 D(G(z)): 0.1745 / 0.1316
[60/4000][300/600] Loss_D: 0.4160 Loss_G: 2.6445 D(x): 0.8625 D(G(z)): 0.1577 / 0.1287
[60/4000][400/600] Loss_D: 0.4780 Loss_G: 2.6649 D(x): 0.8374 D(G(z)): 0.1439 / 0.1158
[60/4000][500/600] Loss_D: 0.5883 Loss_G: 2.6431 D(x): 0.7915 D(G(z)): 0.1374 / 0.1294
[61/4000][0/600] Loss_D: 0.5559 Loss_G: 2.2447 D(x): 0.8449 D(G(z)): 0.2026 / 0.1767
[61/4000][100/600] Loss_D: 0.4460 Loss_G: 2.4634 D(x): 0.8348 D(G(z)): 0.1420 / 0.1375
[61/4000][200/600] Loss_D: 0.3675 Loss_G: 2.7177 D(x): 0.8969 D(G(z)): 0.1480 / 0.1105
[61/4000][300/600] Loss_D: 0.5613 Loss_G: 2.4961 D(x): 0.8340 D(G(z)): 0.1818 / 0.1430
[61/4000][400/600] Loss_D: 0.4472 Loss_G: 2.6323 D(x): 0.8529 D(G(z)): 0.1547 / 0.1190
[61/4000][500/600] Loss_D: 0.5451 Loss_G: 2.6984 D(x): 0.8148 D(G(z)): 0.1348 / 0.1235
[62/4000][0/600] Loss_D: 0.4845 Loss_G: 2.4751 D(x): 0.8421 D(G(z)): 0.1628 / 0.1522
[62/4000][100/600] Loss_D: 0.4518 Loss_G: 2.4662 D(x): 0.8497 D(G(z)): 0.1661 / 0.1587
[62/4000][200/600] Loss_D: 0.4293 Loss_G: 2.4620 D(x): 0.9162 D(G(z)): 0.1944 / 0.1366
[62/4000][300/600] Loss_D: 0.5728 Loss_G: 2.6872 D(x): 0.8328 D(G(z)): 0.1630 / 0.1282
[62/4000][400/600] Loss_D: 0.6135 Loss_G: 2.4372 D(x): 0.8245 D(G(z)): 0.1851 / 0.1687
[62/4000][500/600] Loss_D: 0.5424 Loss_G: 2.5752 D(x): 0.8204 D(G(z)): 0.1536 / 0.1394
[63/4000][0/600] Loss_D: 0.4131 Loss_G: 2.5626 D(x): 0.8624 D(G(z)): 0.1460 / 0.1316
[63/4000][100/600] Loss_D: 0.5317 Loss_G: 2.4677 D(x): 0.8426 D(G(z)): 0.1705 / 0.1617
[63/4000][200/600] Loss_D: 0.3494 Loss_G: 3.0092 D(x): 0.9063 D(G(z)): 0.1404 / 0.0885
[63/4000][300/600] Loss_D: 0.5626 Loss_G: 2.9668 D(x): 0.8002 D(G(z)): 0.0988 / 0.0850
[63/4000][400/600] Loss_D: 0.6127 Loss_G: 2.0733 D(x): 0.8304 D(G(z)): 0.2181 / 0.1982
[63/4000][500/600] Loss_D: 0.5197 Loss_G: 2.5897 D(x): 0.8212 D(G(z)): 0.1296 / 0.1184
[64/4000][0/600] Loss_D: 0.5690 Loss_G: 2.4382 D(x): 0.8150 D(G(z)): 0.1742 / 0.1559
[64/4000][100/600] Loss_D: 0.5931 Loss_G: 2.2840 D(x): 0.8162 D(G(z)): 0.1916 / 0.1830
[64/4000][200/600] Loss_D: 0.3738 Loss_G: 2.4284 D(x): 0.9074 D(G(z)): 0.1810 / 0.1478
[64/4000][300/600] Loss_D: 0.6382 Loss_G: 1.9895 D(x): 0.8565 D(G(z)): 0.2532 / 0.2078
[64/4000][400/600] Loss_D: 0.5893 Loss_G: 2.3195 D(x): 0.8205 D(G(z)): 0.1878 / 0.1650
[64/4000][500/600] Loss_D: 0.6946 Loss_G: 2.3113 D(x): 0.7838 D(G(z)): 0.1544 / 0.1530
[65/4000][0/600] Loss_D: 0.4942 Loss_G: 2.2945 D(x): 0.8534 D(G(z)): 0.1856 / 0.1752
[65/4000][100/600] Loss_D: 0.5776 Loss_G: 2.3553 D(x): 0.8104 D(G(z)): 0.1681 / 0.1568
[65/4000][200/600] Loss_D: 0.3353 Loss_G: 2.8011 D(x): 0.9080 D(G(z)): 0.1506 / 0.1177
[65/4000][300/600] Loss_D: 0.5660 Loss_G: 2.5024 D(x): 0.8471 D(G(z)): 0.2076 / 0.1650
[65/4000][400/600] Loss_D: 0.7136 Loss_G: 2.3720 D(x): 0.8029 D(G(z)): 0.2044 / 0.1747
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-12-b08d64d40419> in <module>()
     10         label.data.resize_(bs).fill_(real_label)
     11 
---> 12         output = net_D(input)
     13         errD_real = criterion(output, label)
     14         errD_real.backward()

~/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    204 
    205     def __call__(self, *input, **kwargs):
--> 206         result = self.forward(*input, **kwargs)
    207         for hook in self._forward_hooks.values():
    208             hook_result = hook(self, input, result)

<ipython-input-6-0001a6f9f072> in forward(self, x)
     14     def forward(self, x):
     15         x = x.view(x.size(0), 784)
---> 16         x = self.main(x)
     17         return x
     18 

~/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    204 
    205     def __call__(self, *input, **kwargs):
--> 206         result = self.forward(*input, **kwargs)
    207         for hook in self._forward_hooks.values():
    208             hook_result = hook(self, input, result)

~/anaconda3/lib/python3.6/site-packages/torch/nn/modules/container.py in forward(self, input)
     62     def forward(self, input):
     63         for module in self._modules.values():
---> 64             input = module(input)
     65         return input
     66 

~/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    204 
    205     def __call__(self, *input, **kwargs):
--> 206         result = self.forward(*input, **kwargs)
    207         for hook in self._forward_hooks.values():
    208             hook_result = hook(self, input, result)

~/anaconda3/lib/python3.6/site-packages/torch/nn/modules/linear.py in forward(self, input)
     52             return self._backend.Linear()(input, self.weight)
     53         else:
---> 54             return self._backend.Linear()(input, self.weight, self.bias)
     55 
     56     def __repr__(self):

~/anaconda3/lib/python3.6/site-packages/torch/nn/_functions/linear.py in forward(self, input, weight, bias)
      8         self.save_for_backward(input, weight, bias)
      9         output = input.new(input.size(0), weight.size(0))
---> 10         output.addmm_(0, 1, input, weight.t())
     11         if bias is not None:
     12             # cuBLAS doesn't support 0 strides in sger, so we can't use expand

KeyboardInterrupt: 
In [13]:
fake = net_G(fixed_noise)
vutils.save_image(fake.data[:64], '%s/fake_samples2.png' % 'results', normalize=True)
In [14]:
from PIL import Image
im = Image.open("results/fake_samples2.png", "r")
plt.imshow(np.array(im))
Out[14]:
<matplotlib.image.AxesImage at 0x7f123d862f60>
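
As an alternative to saving the grid to disk and re-loading it with PIL, the grid can be displayed directly from the tensor (a sketch using torchvision's make_grid; not a cell that was run here):

grid = vutils.make_grid(fake.data[:64], normalize=True).cpu().numpy()   # (C, H, W)
plt.imshow(np.transpose(grid, (1, 2, 0)).squeeze(), cmap='gray')        # (H, W[, C]) for imshow
plt.axis('off')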