DCGAN for MNIST (PyTorch)

The following changes are made to a deep convolutional GAN (DCGAN):

  • Replace all pooling layers with strided convolutions (discriminator) and fractional-strided convolutions (generator).
  • Use batchnorm in both the generator and the discriminator.
  • Remove fully connected hidden layers.
  • Use ReLU activations in the generator, except for the output layer, which uses tanh.
  • Use LeakyReLU activations in every layer of the discriminator.

Scratch that!

Sample code is published in the official tutorial, so the implementation below follows it.
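Note: the DCGAN paper (and the official tutorial) also initializes conv weights from N(0, 0.02) and batchnorm weights from N(1, 0.02); that step is skipped in this notebook. A minimal sketch of the helper, in the same old-style API:

def weights_init(m):
    # DCGAN-style initialization (paper / official tutorial)
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        m.weight.data.normal_(0.0, 0.02)   # conv weights ~ N(0, 0.02)
    elif classname.find('BatchNorm') != -1:
        m.weight.data.normal_(1.0, 0.02)   # batchnorm scale ~ N(1, 0.02)
        m.bias.data.fill_(0)               # batchnorm shift starts at 0

# usage, after constructing the networks:
# net_D.apply(weights_init)
# net_G.apply(weights_init)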

In [2]:
%matplotlib inline
import torch
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable

# alias the tensor namespace so the FloatTensor calls below run on GPU or CPU
if torch.cuda.is_available():
    import torch.cuda as t
else:
    import torch as t

import torchvision
from torchvision import datasets, models, transforms, utils
import torchvision.utils as vutils

import numpy as np
from numpy.random import normal
import matplotlib.pyplot as plt
import os

Preparing the MNIST dataset

In [2]:
bs = 100
sz = 32
In [6]:
from torchvision.datasets import ImageFolder
from torchvision.transforms import ToTensor
imagenet_data = ImageFolder('/home/ubuntu/cutting-edge-dl-for-coders-part2/data/default/', 
                            transform=transforms.Compose([
                            transforms.Scale(sz),
                            transforms.ToTensor()]))
dataloader = torch.utils.data.DataLoader(imagenet_data,
                                         batch_size=bs,
                                         shuffle=True)
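The loader above reads pre-exported images from a folder on disk. Since the target is MNIST, the built-in dataset class is an alternative; a minimal sketch, assuming the same old torchvision API (note that datasets.MNIST yields single-channel images, so nc would have to be 1 below):

mnist_data = datasets.MNIST('data/mnist', train=True, download=True,
                            transform=transforms.Compose([
                                transforms.Scale(sz),   # 28x28 -> 32x32
                                transforms.ToTensor()]))
dataloader = torch.utils.data.DataLoader(mnist_data, batch_size=bs, shuffle=True)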

Model

In [7]:
nz = 100
ngf = 32
ndf = 32
nc = 3
In [8]:
'''Discriminator'''
class netD(nn.Module):
    def __init__(self):
        super(netD, self).__init__()
        self.main = nn.Sequential(
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 4, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
        )

    def forward(self, x):
        return self.main(x)

'''Generator'''
class netG(nn.Module):
    def __init__(self):
        super(netG, self).__init__()
        self.main = nn.Sequential(
            nn.ConvTranspose2d(nz, ngf * 4, 4, 1, 0, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False),
            nn.Tanh()
        )

    def forward(self, x):
        return self.main(x)
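With sz = 32, the discriminator halves the spatial size three times (32 → 16 → 8 → 4) and the final 4×4 convolution collapses it to a single score per image; the generator walks the same ladder in reverse from a (nz, 1, 1) noise vector. A quick shape sanity check (a sketch, not part of the training run):

z = Variable(torch.randn(bs, nz, 1, 1))
x = Variable(torch.randn(bs, nc, sz, sz))
print(netG()(z).size())   # expected: (100, 3, 32, 32)
print(netD()(x).size())   # expected: (100, 1, 1, 1)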
In [9]:
criterion = nn.BCELoss()
net_D = netD()
net_G = netG()

if torch.cuda.is_available():
    net_D = net_D.cuda()
    net_G = net_G.cuda()
    criterion = criterion.cuda()
In [10]:
print(net_D)
netD (
  (main): Sequential (
    (0): Conv2d(3, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (1): LeakyReLU (0.2, inplace)
    (2): Conv2d(32, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
    (4): LeakyReLU (0.2, inplace)
    (5): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (6): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
    (7): LeakyReLU (0.2, inplace)
    (8): Conv2d(128, 1, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (9): Sigmoid ()
  )
)
In [11]:
print(net_G)
netG (
  (main): Sequential (
    (0): ConvTranspose2d(100, 128, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
    (2): ReLU (inplace)
    (3): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
    (5): ReLU (inplace)
    (6): ConvTranspose2d(64, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (7): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True)
    (8): ReLU (inplace)
    (9): ConvTranspose2d(32, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (10): Tanh ()
  )
)
In [12]:
optimizerD = optim.Adam(net_D.parameters(), lr = 0.00005)
optimizerG = optim.Adam(net_G.parameters(), lr = 0.00005)
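For reference, the DCGAN paper recommends Adam with lr = 0.0002 and beta1 = 0.5 rather than the smaller learning rate and default betas used here; that setting would read:

# DCGAN-paper hyperparameters (for comparison; not what produced the logs below)
# optimizerD = optim.Adam(net_D.parameters(), lr=0.0002, betas=(0.5, 0.999))
# optimizerG = optim.Adam(net_G.parameters(), lr=0.0002, betas=(0.5, 0.999))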

Train

In [13]:
input = t.FloatTensor(bs, nc, sz, sz)
noise = t.FloatTensor(normal(0, 1, (bs, 100, 1, 1)))
# fixed_noise is never resampled, so the saved sample grids are comparable across epochs
fixed_noise = t.FloatTensor(bs, 100, 1, 1).normal_(0, 1)
label = t.FloatTensor(bs)

real_label = 1
fake_label = 0

input = Variable(input)
label = Variable(label)
noise = Variable(noise)
fixed_noise = Variable(fixed_noise)
In [14]:
niter = 4000
In [ ]:
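# make sure the output directory for the sample grids exists
# (my addition; vutils.save_image below fails if 'results' is missing)
if not os.path.exists('results'):
    os.makedirs('results')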
for epoch in range(niter):
    for i, data in enumerate(dataloader, 0):
        ############################
        # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
        ###########################
        # train with real (data)
        net_D.zero_grad()
        real, _ = data
        input.data.resize_(real.size()).copy_(real)
        label.data.resize_(bs).fill_(real_label)
        output = net_D(input)
        errD_real = criterion(output, label)
        errD_real.backward()
        D_x = output.data.mean()

        # train with fake (generated) data
        noise.data.resize_(bs, 100, 1, 1)
        noise.data.normal_(0, 1)
        fake = net_G(noise)
        label.data.fill_(fake_label)
        output = net_D(fake.detach())  # detach so this backward pass does not reach G
        errD_fake = criterion(output, label)
        errD_fake.backward()
        D_G_z1 = output.data.mean()

        errD = errD_real + errD_fake
        optimizerD.step()

        ############################
        # (2) Update G network: maximize log(D(G(z)))
        ###########################
        net_G.zero_grad()
        label.data.fill_(real_label)  # the generator wants D to answer "real"
        output = net_D(fake)          # no detach here: gradients flow back into G
        errG = criterion(output, label)
        errG.backward()
        D_G_z2 = output.data.mean()
        optimizerG.step()
        
        if i % 100 == 0:
            print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f'
                  % (epoch, niter, i, len(dataloader),
                     errD.data[0], errG.data[0],  D_x, D_G_z1, D_G_z2))
    if epoch % 10 == 0:
        fake = net_G(fixed_noise)
        vutils.save_image(fake.data, '%s/fake_samples_epoch_%03d.png'
                          % ('results', epoch), normalize=True)
[0/4000][0/100] Loss_D: 0.4049 Loss_G: 1.6940 D(x): 0.8760 D(G(z)): 0.2310 / 0.1891
[1/4000][0/100] Loss_D: 0.2647 Loss_G: 2.2955 D(x): 0.8981 D(G(z)): 0.1360 / 0.1052
[2/4000][0/100] Loss_D: 0.3044 Loss_G: 2.3675 D(x): 0.8820 D(G(z)): 0.1496 / 0.1069
[3/4000][0/100] Loss_D: 0.3290 Loss_G: 2.3378 D(x): 0.8591 D(G(z)): 0.1450 / 0.1067
[4/4000][0/100] Loss_D: 0.4345 Loss_G: 2.4491 D(x): 0.7695 D(G(z)): 0.1272 / 0.0943
[5/4000][0/100] Loss_D: 0.4679 Loss_G: 2.1508 D(x): 0.8217 D(G(z)): 0.2107 / 0.1351
[6/4000][0/100] Loss_D: 0.2948 Loss_G: 2.7199 D(x): 0.8445 D(G(z)): 0.1039 / 0.0771
[7/4000][0/100] Loss_D: 0.4960 Loss_G: 2.2441 D(x): 0.7508 D(G(z)): 0.1524 / 0.1188
[8/4000][0/100] Loss_D: 0.3113 Loss_G: 2.7032 D(x): 0.8493 D(G(z)): 0.1237 / 0.0773
[9/4000][0/100] Loss_D: 0.1932 Loss_G: 3.0354 D(x): 0.9071 D(G(z)): 0.0833 / 0.0543
[10/4000][0/100] Loss_D: 0.3096 Loss_G: 2.9119 D(x): 0.8532 D(G(z)): 0.1135 / 0.0691
[11/4000][0/100] Loss_D: 0.2173 Loss_G: 3.0430 D(x): 0.9015 D(G(z)): 0.0969 / 0.0599
[12/4000][0/100] Loss_D: 0.4308 Loss_G: 2.5355 D(x): 0.8060 D(G(z)): 0.1523 / 0.1078
[13/4000][0/100] Loss_D: 0.4249 Loss_G: 2.5583 D(x): 0.8081 D(G(z)): 0.1475 / 0.1103
[14/4000][0/100] Loss_D: 0.3359 Loss_G: 3.0752 D(x): 0.8209 D(G(z)): 0.0874 / 0.0569
[15/4000][0/100] Loss_D: 0.2054 Loss_G: 3.1428 D(x): 0.9062 D(G(z)): 0.0878 / 0.0644
[16/4000][0/100] Loss_D: 0.3651 Loss_G: 2.8941 D(x): 0.8339 D(G(z)): 0.1315 / 0.0719
[17/4000][0/100] Loss_D: 0.1666 Loss_G: 3.8871 D(x): 0.9055 D(G(z)): 0.0497 / 0.0282
[18/4000][0/100] Loss_D: 0.2157 Loss_G: 3.4911 D(x): 0.8861 D(G(z)): 0.0682 / 0.0473
[19/4000][0/100] Loss_D: 0.2218 Loss_G: 3.2230 D(x): 0.9060 D(G(z)): 0.1007 / 0.0564
[20/4000][0/100] Loss_D: 0.3289 Loss_G: 2.7762 D(x): 0.8723 D(G(z)): 0.1463 / 0.0834
[21/4000][0/100] Loss_D: 0.2807 Loss_G: 3.0610 D(x): 0.8665 D(G(z)): 0.1021 / 0.0681
[22/4000][0/100] Loss_D: 0.3187 Loss_G: 2.5577 D(x): 0.8789 D(G(z)): 0.1419 / 0.1080
[23/4000][0/100] Loss_D: 0.1368 Loss_G: 4.1411 D(x): 0.9399 D(G(z)): 0.0628 / 0.0330
[24/4000][0/100] Loss_D: 0.3456 Loss_G: 2.9579 D(x): 0.8306 D(G(z)): 0.0955 / 0.0757
[25/4000][0/100] Loss_D: 0.1948 Loss_G: 3.0858 D(x): 0.9399 D(G(z)): 0.1073 / 0.0733
[26/4000][0/100] Loss_D: 0.3855 Loss_G: 3.2845 D(x): 0.8243 D(G(z)): 0.1190 / 0.0609
[27/4000][0/100] Loss_D: 0.4120 Loss_G: 2.7199 D(x): 0.8096 D(G(z)): 0.1150 / 0.0957
[28/4000][0/100] Loss_D: 0.4894 Loss_G: 3.0436 D(x): 0.7800 D(G(z)): 0.1048 / 0.0833
[29/4000][0/100] Loss_D: 0.8739 Loss_G: 2.1338 D(x): 0.8151 D(G(z)): 0.4243 / 0.1596
[30/4000][0/100] Loss_D: 0.2988 Loss_G: 2.4274 D(x): 0.9150 D(G(z)): 0.1666 / 0.1281
[31/4000][0/100] Loss_D: 0.3170 Loss_G: 2.5486 D(x): 0.8588 D(G(z)): 0.1319 / 0.1058
[32/4000][0/100] Loss_D: 0.4307 Loss_G: 2.4441 D(x): 0.8024 D(G(z)): 0.1473 / 0.1141
[33/4000][0/100] Loss_D: 0.4028 Loss_G: 2.2585 D(x): 0.8488 D(G(z)): 0.1820 / 0.1474
[34/4000][0/100] Loss_D: 0.4683 Loss_G: 2.7041 D(x): 0.8207 D(G(z)): 0.1702 / 0.1119
[35/4000][0/100] Loss_D: 0.3071 Loss_G: 2.7606 D(x): 0.8926 D(G(z)): 0.1446 / 0.0969
[36/4000][0/100] Loss_D: 0.3540 Loss_G: 2.9385 D(x): 0.8197 D(G(z)): 0.1111 / 0.0799
[37/4000][0/100] Loss_D: 0.3580 Loss_G: 2.5705 D(x): 0.8475 D(G(z)): 0.1458 / 0.1012
[38/4000][0/100] Loss_D: 0.2469 Loss_G: 2.9230 D(x): 0.8971 D(G(z)): 0.1123 / 0.0787
[39/4000][0/100] Loss_D: 0.4518 Loss_G: 2.5479 D(x): 0.8344 D(G(z)): 0.1818 / 0.1190
[40/4000][0/100] Loss_D: 0.2763 Loss_G: 2.9390 D(x): 0.8634 D(G(z)): 0.0959 / 0.0760
[41/4000][0/100] Loss_D: 0.2932 Loss_G: 2.8342 D(x): 0.8652 D(G(z)): 0.1097 / 0.0836
[42/4000][0/100] Loss_D: 0.4925 Loss_G: 2.1592 D(x): 0.8041 D(G(z)): 0.1803 / 0.1609
[43/4000][0/100] Loss_D: 0.3032 Loss_G: 2.8758 D(x): 0.8718 D(G(z)): 0.1301 / 0.0931
[44/4000][0/100] Loss_D: 0.7978 Loss_G: 2.3334 D(x): 0.7165 D(G(z)): 0.2316 / 0.1551
[45/4000][0/100] Loss_D: 0.4061 Loss_G: 2.6933 D(x): 0.8499 D(G(z)): 0.1708 / 0.1179
[46/4000][0/100] Loss_D: 0.3570 Loss_G: 2.9281 D(x): 0.8629 D(G(z)): 0.1557 / 0.0922
[47/4000][0/100] Loss_D: 0.4717 Loss_G: 2.5250 D(x): 0.7754 D(G(z)): 0.1376 / 0.1167
[48/4000][0/100] Loss_D: 0.3429 Loss_G: 2.7918 D(x): 0.8810 D(G(z)): 0.1408 / 0.0927
[49/4000][0/100] Loss_D: 0.4571 Loss_G: 2.8704 D(x): 0.7889 D(G(z)): 0.1170 / 0.0915
[50/4000][0/100] Loss_D: 0.3740 Loss_G: 2.5480 D(x): 0.9179 D(G(z)): 0.2183 / 0.1302
[51/4000][0/100] Loss_D: 0.3307 Loss_G: 2.8535 D(x): 0.8890 D(G(z)): 0.1598 / 0.0947
[52/4000][0/100] Loss_D: 0.2358 Loss_G: 3.5288 D(x): 0.9238 D(G(z)): 0.1252 / 0.0533
[53/4000][0/100] Loss_D: 0.4603 Loss_G: 2.8176 D(x): 0.8426 D(G(z)): 0.1693 / 0.1123
[54/4000][0/100] Loss_D: 0.3713 Loss_G: 3.0801 D(x): 0.8447 D(G(z)): 0.1322 / 0.0873
[55/4000][0/100] Loss_D: 0.4645 Loss_G: 2.4591 D(x): 0.8610 D(G(z)): 0.2110 / 0.1420
[56/4000][0/100] Loss_D: 0.3985 Loss_G: 2.6912 D(x): 0.8415 D(G(z)): 0.1548 / 0.1063
[57/4000][0/100] Loss_D: 0.5195 Loss_G: 2.4593 D(x): 0.8152 D(G(z)): 0.2108 / 0.1278
[58/4000][0/100] Loss_D: 0.2022 Loss_G: 3.4499 D(x): 0.9186 D(G(z)): 0.0958 / 0.0580
[59/4000][0/100] Loss_D: 0.2689 Loss_G: 3.1476 D(x): 0.8824 D(G(z)): 0.1072 / 0.0763
[60/4000][0/100] Loss_D: 0.3676 Loss_G: 3.0577 D(x): 0.8585 D(G(z)): 0.1328 / 0.0908
[61/4000][0/100] Loss_D: 0.2325 Loss_G: 3.1628 D(x): 0.9025 D(G(z)): 0.1061 / 0.0633
[62/4000][0/100] Loss_D: 0.2838 Loss_G: 2.9311 D(x): 0.8964 D(G(z)): 0.1196 / 0.0921
[63/4000][0/100] Loss_D: 0.4589 Loss_G: 3.0041 D(x): 0.8375 D(G(z)): 0.1925 / 0.1018
[64/4000][0/100] Loss_D: 0.3052 Loss_G: 2.8047 D(x): 0.9077 D(G(z)): 0.1637 / 0.1024
[65/4000][0/100] Loss_D: 0.3181 Loss_G: 2.9996 D(x): 0.8618 D(G(z)): 0.1132 / 0.0838
[66/4000][0/100] Loss_D: 0.2987 Loss_G: 2.9505 D(x): 0.8977 D(G(z)): 0.1430 / 0.0936
[67/4000][0/100] Loss_D: 0.6031 Loss_G: 2.3479 D(x): 0.7775 D(G(z)): 0.2069 / 0.1421
[68/4000][0/100] Loss_D: 0.5542 Loss_G: 2.3418 D(x): 0.8342 D(G(z)): 0.2371 / 0.1520
[69/4000][0/100] Loss_D: 0.3477 Loss_G: 2.9715 D(x): 0.8527 D(G(z)): 0.1323 / 0.0869
[70/4000][0/100] Loss_D: 0.5255 Loss_G: 2.2913 D(x): 0.8091 D(G(z)): 0.2027 / 0.1618
[71/4000][0/100] Loss_D: 0.3711 Loss_G: 2.5286 D(x): 0.8721 D(G(z)): 0.1664 / 0.1424
[72/4000][0/100] Loss_D: 0.3346 Loss_G: 2.5684 D(x): 0.8918 D(G(z)): 0.1523 / 0.1122
[73/4000][0/100] Loss_D: 0.5151 Loss_G: 2.4392 D(x): 0.8225 D(G(z)): 0.2086 / 0.1388
[74/4000][0/100] Loss_D: 0.6967 Loss_G: 2.4096 D(x): 0.7328 D(G(z)): 0.2127 / 0.1509
[75/4000][0/100] Loss_D: 0.2442 Loss_G: 3.5009 D(x): 0.8857 D(G(z)): 0.0839 / 0.0653
[76/4000][0/100] Loss_D: 0.4404 Loss_G: 2.5271 D(x): 0.8434 D(G(z)): 0.1726 / 0.1274
[77/4000][0/100] Loss_D: 0.2834 Loss_G: 3.5429 D(x): 0.8667 D(G(z)): 0.0751 / 0.0526
[78/4000][0/100] Loss_D: 0.3381 Loss_G: 2.7355 D(x): 0.8733 D(G(z)): 0.1429 / 0.1122
[79/4000][0/100] Loss_D: 0.5694 Loss_G: 2.3463 D(x): 0.7925 D(G(z)): 0.2129 / 0.1591
[80/4000][0/100] Loss_D: 0.4753 Loss_G: 2.5436 D(x): 0.8202 D(G(z)): 0.1693 / 0.1174
[81/4000][0/100] Loss_D: 0.4815 Loss_G: 2.4250 D(x): 0.8316 D(G(z)): 0.1988 / 0.1408
[82/4000][0/100] Loss_D: 0.4737 Loss_G: 2.6769 D(x): 0.7827 D(G(z)): 0.1383 / 0.1206
[83/4000][0/100] Loss_D: 0.4637 Loss_G: 2.3420 D(x): 0.8602 D(G(z)): 0.2162 / 0.1571
[84/4000][0/100] Loss_D: 0.6439 Loss_G: 2.0996 D(x): 0.7643 D(G(z)): 0.2317 / 0.1812
[85/4000][0/100] Loss_D: 0.6072 Loss_G: 2.5143 D(x): 0.7432 D(G(z)): 0.1842 / 0.1387
[86/4000][0/100] Loss_D: 0.6275 Loss_G: 2.1913 D(x): 0.7697 D(G(z)): 0.2308 / 0.1710
[87/4000][0/100] Loss_D: 0.6701 Loss_G: 2.0496 D(x): 0.7347 D(G(z)): 0.2274 / 0.1828
[88/4000][0/100] Loss_D: 0.4150 Loss_G: 2.7037 D(x): 0.8449 D(G(z)): 0.1834 / 0.1143
[89/4000][0/100] Loss_D: 0.4655 Loss_G: 2.4087 D(x): 0.8054 D(G(z)): 0.1641 / 0.1348
[90/4000][0/100] Loss_D: 0.6238 Loss_G: 2.3890 D(x): 0.7296 D(G(z)): 0.1812 / 0.1513
[91/4000][0/100] Loss_D: 0.6798 Loss_G: 2.3021 D(x): 0.7197 D(G(z)): 0.2012 / 0.1520
[92/4000][0/100] Loss_D: 0.3289 Loss_G: 2.9302 D(x): 0.8541 D(G(z)): 0.1270 / 0.0939
[93/4000][0/100] Loss_D: 0.5013 Loss_G: 2.4595 D(x): 0.7766 D(G(z)): 0.1526 / 0.1258
[94/4000][0/100] Loss_D: 0.5064 Loss_G: 2.2673 D(x): 0.8044 D(G(z)): 0.1989 / 0.1460
[95/4000][0/100] Loss_D: 0.3882 Loss_G: 2.4698 D(x): 0.8672 D(G(z)): 0.1720 / 0.1450
[96/4000][0/100] Loss_D: 0.6628 Loss_G: 2.1897 D(x): 0.7633 D(G(z)): 0.2371 / 0.1767
[97/4000][0/100] Loss_D: 0.4624 Loss_G: 2.5559 D(x): 0.8429 D(G(z)): 0.1850 / 0.1312
[98/4000][0/100] Loss_D: 0.8268 Loss_G: 1.7519 D(x): 0.7016 D(G(z)): 0.2773 / 0.2533
[99/4000][0/100] Loss_D: 0.2922 Loss_G: 2.7660 D(x): 0.9084 D(G(z)): 0.1478 / 0.1075
[100/4000][0/100] Loss_D: 0.7438 Loss_G: 2.0123 D(x): 0.7910 D(G(z)): 0.3033 / 0.2102
[101/4000][0/100] Loss_D: 0.4372 Loss_G: 3.0693 D(x): 0.7928 D(G(z)): 0.1104 / 0.0813
[102/4000][0/100] Loss_D: 0.6588 Loss_G: 1.9957 D(x): 0.7769 D(G(z)): 0.2504 / 0.2211
[103/4000][0/100] Loss_D: 0.5444 Loss_G: 2.4065 D(x): 0.7951 D(G(z)): 0.1959 / 0.1473
[104/4000][0/100] Loss_D: 0.3335 Loss_G: 2.6539 D(x): 0.8638 D(G(z)): 0.1341 / 0.1112
[105/4000][0/100] Loss_D: 0.5204 Loss_G: 2.7411 D(x): 0.7603 D(G(z)): 0.1466 / 0.1064
[106/4000][0/100] Loss_D: 0.4321 Loss_G: 2.8949 D(x): 0.8305 D(G(z)): 0.1571 / 0.1128
[107/4000][0/100] Loss_D: 0.4448 Loss_G: 2.2005 D(x): 0.8652 D(G(z)): 0.2115 / 0.1790
[108/4000][0/100] Loss_D: 0.6347 Loss_G: 2.1106 D(x): 0.7218 D(G(z)): 0.1909 / 0.1767
[109/4000][0/100] Loss_D: 0.3774 Loss_G: 2.9269 D(x): 0.8160 D(G(z)): 0.1252 / 0.0870
[110/4000][0/100] Loss_D: 0.5229 Loss_G: 2.3037 D(x): 0.8009 D(G(z)): 0.2121 / 0.1635
[111/4000][0/100] Loss_D: 0.6173 Loss_G: 2.0704 D(x): 0.8208 D(G(z)): 0.2654 / 0.2027
[112/4000][0/100] Loss_D: 0.6394 Loss_G: 1.9421 D(x): 0.8054 D(G(z)): 0.2695 / 0.2076
[113/4000][0/100] Loss_D: 0.7449 Loss_G: 1.8903 D(x): 0.7680 D(G(z)): 0.2805 / 0.2347
[114/4000][0/100] Loss_D: 0.3053 Loss_G: 2.9876 D(x): 0.8685 D(G(z)): 0.1222 / 0.0898
[115/4000][0/100] Loss_D: 0.3756 Loss_G: 2.6208 D(x): 0.8392 D(G(z)): 0.1465 / 0.1116
[116/4000][0/100] Loss_D: 0.8943 Loss_G: 1.9070 D(x): 0.6919 D(G(z)): 0.3077 / 0.2296
[117/4000][0/100] Loss_D: 0.6038 Loss_G: 2.0293 D(x): 0.8298 D(G(z)): 0.2791 / 0.1842
[118/4000][0/100] Loss_D: 0.7520 Loss_G: 2.0925 D(x): 0.7297 D(G(z)): 0.2525 / 0.1935
[119/4000][0/100] Loss_D: 0.8667 Loss_G: 1.7651 D(x): 0.7774 D(G(z)): 0.3632 / 0.2666
[120/4000][0/100] Loss_D: 0.8806 Loss_G: 1.8412 D(x): 0.6697 D(G(z)): 0.2766 / 0.2077
[121/4000][0/100] Loss_D: 0.6134 Loss_G: 1.8948 D(x): 0.8045 D(G(z)): 0.2776 / 0.2093
[122/4000][0/100] Loss_D: 0.6683 Loss_G: 2.3404 D(x): 0.7144 D(G(z)): 0.1727 / 0.1530
[123/4000][0/100] Loss_D: 0.4724 Loss_G: 2.7651 D(x): 0.7817 D(G(z)): 0.1398 / 0.1002
[124/4000][0/100] Loss_D: 0.7187 Loss_G: 2.1986 D(x): 0.6951 D(G(z)): 0.1993 / 0.1690
[125/4000][0/100] Loss_D: 0.4526 Loss_G: 2.5517 D(x): 0.8244 D(G(z)): 0.1852 / 0.1372
[126/4000][0/100] Loss_D: 0.5199 Loss_G: 2.3269 D(x): 0.8160 D(G(z)): 0.1976 / 0.1506
[127/4000][0/100] Loss_D: 0.4167 Loss_G: 2.5393 D(x): 0.8435 D(G(z)): 0.1771 / 0.1385
[128/4000][0/100] Loss_D: 0.7163 Loss_G: 1.8251 D(x): 0.7950 D(G(z)): 0.3007 / 0.2403
[129/4000][0/100] Loss_D: 0.7194 Loss_G: 1.8624 D(x): 0.7378 D(G(z)): 0.2562 / 0.2228
[130/4000][0/100] Loss_D: 0.6828 Loss_G: 2.2691 D(x): 0.7537 D(G(z)): 0.2378 / 0.1647
[131/4000][0/100] Loss_D: 1.0801 Loss_G: 1.3464 D(x): 0.6806 D(G(z)): 0.3914 / 0.3422
[132/4000][0/100] Loss_D: 0.6028 Loss_G: 1.9583 D(x): 0.7877 D(G(z)): 0.2411 / 0.2077
[133/4000][0/100] Loss_D: 0.8065 Loss_G: 2.0626 D(x): 0.7163 D(G(z)): 0.2674 / 0.2080
[134/4000][0/100] Loss_D: 0.7059 Loss_G: 2.1576 D(x): 0.7588 D(G(z)): 0.2775 / 0.1881
[135/4000][0/100] Loss_D: 0.7512 Loss_G: 2.0741 D(x): 0.7454 D(G(z)): 0.2680 / 0.1846
[136/4000][0/100] Loss_D: 0.4585 Loss_G: 2.3057 D(x): 0.8383 D(G(z)): 0.1956 / 0.1574
[137/4000][0/100] Loss_D: 0.6777 Loss_G: 1.8878 D(x): 0.7483 D(G(z)): 0.2454 / 0.2107
[138/4000][0/100] Loss_D: 0.7156 Loss_G: 2.0643 D(x): 0.7857 D(G(z)): 0.3051 / 0.1962
[139/4000][0/100] Loss_D: 0.7526 Loss_G: 1.9920 D(x): 0.6994 D(G(z)): 0.2515 / 0.1817
[140/4000][0/100] Loss_D: 0.6892 Loss_G: 2.1277 D(x): 0.7260 D(G(z)): 0.2088 / 0.1945
[141/4000][0/100] Loss_D: 0.7241 Loss_G: 1.9700 D(x): 0.7289 D(G(z)): 0.2385 / 0.2140
[142/4000][0/100] Loss_D: 0.8310 Loss_G: 1.5783 D(x): 0.7371 D(G(z)): 0.3197 / 0.2853
[143/4000][0/100] Loss_D: 0.8042 Loss_G: 1.6990 D(x): 0.7000 D(G(z)): 0.2763 / 0.2479
[144/4000][0/100] Loss_D: 0.2473 Loss_G: 2.9240 D(x): 0.9073 D(G(z)): 0.1193 / 0.0925
[145/4000][0/100] Loss_D: 0.8645 Loss_G: 1.6541 D(x): 0.7306 D(G(z)): 0.3470 / 0.2533
[146/4000][0/100] Loss_D: 0.4244 Loss_G: 2.3140 D(x): 0.8491 D(G(z)): 0.1941 / 0.1572
[147/4000][0/100] Loss_D: 0.9759 Loss_G: 1.8104 D(x): 0.6166 D(G(z)): 0.2414 / 0.2290
[148/4000][0/100] Loss_D: 0.4767 Loss_G: 2.1629 D(x): 0.8232 D(G(z)): 0.2021 / 0.1821
[149/4000][0/100] Loss_D: 0.6569 Loss_G: 1.7163 D(x): 0.7954 D(G(z)): 0.2804 / 0.2516
[150/4000][0/100] Loss_D: 0.7609 Loss_G: 1.8352 D(x): 0.7439 D(G(z)): 0.3075 / 0.2252
[151/4000][0/100] Loss_D: 0.9506 Loss_G: 1.8019 D(x): 0.6086 D(G(z)): 0.2755 / 0.2356
[152/4000][0/100] Loss_D: 0.6992 Loss_G: 1.6985 D(x): 0.7126 D(G(z)): 0.2471 / 0.2381
[153/4000][0/100] Loss_D: 0.4271 Loss_G: 2.2183 D(x): 0.8413 D(G(z)): 0.1901 / 0.1584
[154/4000][0/100] Loss_D: 0.7600 Loss_G: 2.0155 D(x): 0.7264 D(G(z)): 0.2607 / 0.1990
[155/4000][0/100] Loss_D: 0.3193 Loss_G: 2.6263 D(x): 0.8848 D(G(z)): 0.1495 / 0.1204
[156/4000][0/100] Loss_D: 0.7012 Loss_G: 2.1194 D(x): 0.7307 D(G(z)): 0.2495 / 0.1723
[157/4000][0/100] Loss_D: 0.9367 Loss_G: 1.8490 D(x): 0.6363 D(G(z)): 0.2768 / 0.2278
[158/4000][0/100] Loss_D: 0.8777 Loss_G: 1.7488 D(x): 0.6489 D(G(z)): 0.2641 / 0.2524
[159/4000][0/100] Loss_D: 0.7009 Loss_G: 1.7651 D(x): 0.7975 D(G(z)): 0.3041 / 0.2492
[160/4000][0/100] Loss_D: 1.0481 Loss_G: 1.5933 D(x): 0.6271 D(G(z)): 0.3438 / 0.2665
[161/4000][0/100] Loss_D: 0.4550 Loss_G: 2.3373 D(x): 0.8162 D(G(z)): 0.1872 / 0.1386
[162/4000][0/100] Loss_D: 0.9684 Loss_G: 1.4119 D(x): 0.7473 D(G(z)): 0.4090 / 0.3193
[163/4000][0/100] Loss_D: 0.6739 Loss_G: 2.2782 D(x): 0.7199 D(G(z)): 0.2152 / 0.1665
[164/4000][0/100] Loss_D: 0.7739 Loss_G: 1.7563 D(x): 0.6996 D(G(z)): 0.2751 / 0.2327
[165/4000][0/100] Loss_D: 0.5730 Loss_G: 2.1929 D(x): 0.7657 D(G(z)): 0.1993 / 0.1682
[166/4000][0/100] Loss_D: 0.6845 Loss_G: 2.0072 D(x): 0.7642 D(G(z)): 0.2697 / 0.1986
[167/4000][0/100] Loss_D: 0.7225 Loss_G: 1.7906 D(x): 0.7686 D(G(z)): 0.3033 / 0.2300
[168/4000][0/100] Loss_D: 0.6685 Loss_G: 2.0939 D(x): 0.7054 D(G(z)): 0.2014 / 0.1777
[169/4000][0/100] Loss_D: 0.9570 Loss_G: 1.7590 D(x): 0.6325 D(G(z)): 0.2856 / 0.2364
[170/4000][0/100] Loss_D: 0.7833 Loss_G: 1.7069 D(x): 0.7464 D(G(z)): 0.2822 / 0.2527
[171/4000][0/100] Loss_D: 0.4685 Loss_G: 2.3303 D(x): 0.8052 D(G(z)): 0.1764 / 0.1531
[172/4000][0/100] Loss_D: 1.0011 Loss_G: 1.6177 D(x): 0.7124 D(G(z)): 0.3774 / 0.2759
[173/4000][0/100] Loss_D: 0.4390 Loss_G: 2.4449 D(x): 0.8120 D(G(z)): 0.1613 / 0.1328
[174/4000][0/100] Loss_D: 0.3941 Loss_G: 2.9966 D(x): 0.7948 D(G(z)): 0.1035 / 0.0820
[175/4000][0/100] Loss_D: 0.8711 Loss_G: 1.6038 D(x): 0.7221 D(G(z)): 0.3213 / 0.2682
[176/4000][0/100] Loss_D: 0.6338 Loss_G: 1.9471 D(x): 0.7781 D(G(z)): 0.2568 / 0.2140
[177/4000][0/100] Loss_D: 0.7366 Loss_G: 1.8599 D(x): 0.6996 D(G(z)): 0.2395 / 0.2204
[178/4000][0/100] Loss_D: 0.8713 Loss_G: 1.7078 D(x): 0.6542 D(G(z)): 0.2599 / 0.2363
[179/4000][0/100] Loss_D: 0.7214 Loss_G: 1.7861 D(x): 0.7193 D(G(z)): 0.2706 / 0.2286
[180/4000][0/100] Loss_D: 0.5402 Loss_G: 1.8803 D(x): 0.8278 D(G(z)): 0.2449 / 0.2172
In [ ]:
fake = net_G(fixed_noise)
vutils.save_image(fake.data[:64], '%s/fake_samples4.png' % 'results', normalize=True)
In [3]:
from PIL import Image
im = Image.open("results/fake_samples4.png", "r")
plt.imshow(np.array(im))
Out[3]:
<matplotlib.image.AxesImage at 0x7fa12fcd8a90>
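Instead of round-tripping through a PNG, the sample grid can also be built in memory with vutils.make_grid; a small sketch (my addition):

fake = net_G(fixed_noise)
grid = vutils.make_grid(fake.data[:64], normalize=True)  # (C, H, W) tensor in [0, 1]
plt.imshow(np.transpose(grid.cpu().numpy(), (1, 2, 0)))  # matplotlib wants (H, W, C)
plt.axis('off')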