Vanilla GAN for MNIST (PyTorch)

In [16]:
% matplotlib inline
import torch
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable

if torch.cuda.is_available():
    import torch.cuda as t
else:
    import torch as t

from torchvision import datasets, models, transforms, utils
import torchvision.utils as vutils

import numpy as np
from numpy.random import uniform
import matplotlib.pyplot as plt
import os

mnist datasetの準備

In [17]:
bs = 100  # mini-batch size (shared by the dataloader, the noise/label buffers, and netG)
In [18]:
# DataLoader over the MNIST training split; ToTensor scales pixels to [0, 1],
# matching the Sigmoid output range of the generator.
# shuffle=True: without it the loader yields the dataset in its stored order
# every epoch, so the discriminator sees highly correlated mini-batches.
dataloader = torch.utils.data.DataLoader(
    datasets.MNIST('data/mnist', train=True, download=True,
                   transform=transforms.Compose([
                       transforms.ToTensor()
                   ])),
    batch_size=bs,
    shuffle=True
)

Model

In [19]:
# Discriminator
class netD(nn.Module):
    """MLP discriminator for 28x28 MNIST images.

    Takes a (N, 1, 28, 28) batch and returns an (N, 1) probability in
    (0, 1) that each image is real.
    """

    def __init__(self):
        super(netD, self).__init__()
        # 784 -> 300 -> 256 -> 1, Sigmoid head for BCELoss.
        layers = [
            nn.Linear(784, 300),
            nn.ReLU(),
            nn.Linear(300, 256),
            nn.ReLU(),
            nn.Linear(256, 1),
            nn.Sigmoid(),
        ]
        self.main = nn.Sequential(*layers)

    def forward(self, x):
        # Flatten (N, 1, 28, 28) -> (N, 784) before the MLP.
        flat = x.view(x.size(0), 784)
        return self.main(flat)

'''Generator'''
class netG(nn.Module):
    """MLP generator: maps a 100-d noise vector to a (1, 28, 28) image.

    Input may be (N, 100) or (N, 100, 1, 1); output is (N, 1, 28, 28)
    with values in (0, 1) (Sigmoid), matching ToTensor-scaled MNIST.
    """

    def __init__(self):
        super(netG, self).__init__()
        # 100 -> 200 -> 400 -> 784, Sigmoid to land in the image range.
        self.main = nn.Sequential(
            nn.Linear(100, 200),
            nn.ReLU(),
            nn.Linear(200, 400),
            nn.ReLU(),
            nn.Linear(400, 784),
            nn.Sigmoid()
        )

    def forward(self, x):
        # Bug fix: use the input's own batch size instead of the notebook
        # global ``bs`` — the original ``x.view(bs, 100)`` silently broke for
        # any batch size other than 100 and hid a dependency on kernel state.
        x = x.view(x.size(0), 100)
        x = self.main(x)
        x = x.view(-1, 1, 28, 28)
        return x
In [20]:
# Binary cross-entropy: the standard GAN loss for a sigmoid discriminator.
# (Name ``criteion`` is a typo for "criterion" but is referenced by the
# training loop below, so it is kept.)
criteion = nn.BCELoss()
net_D = netD()
net_G = netG()

if torch.cuda.is_available():
    # Consistency fix: assign back to net_D / net_G. The original bound the
    # results to fresh names D / G that were never used again; it only worked
    # because Module.cuda() moves parameters in place and returns self. The
    # training loop uses net_D / net_G, so keep those names authoritative.
    net_D = net_D.cuda()
    net_G = net_G.cuda()
    criteion = criteion.cuda()
In [21]:
# One Adam optimizer per network; each step() below only updates its own net.
optimizerD = optim.Adam(net_D.parameters(), lr = 1e-4)  # discriminator optimizer
optimizerG = optim.Adam(net_G.parameters(), lr = 1e-4)  # generator optimizer

Train

In [22]:
# Pre-allocated buffers reused every iteration. ``t`` is ``torch.cuda`` when a
# GPU is available (see the import cell), so these become CUDA tensors without
# explicit .cuda() calls.
input = t.FloatTensor(bs, 1, 28, 28)  # holds one batch of real images
# NOTE(review): this uniform init is discarded — the training loop overwrites
# ``noise`` with noise.data.normal_(0, 1) before every use.
noise = t.FloatTensor(uniform(0,1,(bs, 100, 1, 1)))
fixed_noise = t.FloatTensor(bs, 100, 1, 1).normal_(0, 1)  # fixed z to track G's progress visually
label = t.FloatTensor(bs)  # real/fake targets for BCELoss

real_label = 1
fake_label = 0

# Variable is the pre-0.4 PyTorch autograd wrapper; modern PyTorch would use
# plain tensors directly.
input = Variable(input)
label = Variable(label)
noise = Variable(noise)
fixed_noise = Variable(fixed_noise)
In [23]:
niter = 4000  # number of epochs (with bs=100, each MNIST epoch is 600 batches)
In [24]:
# Alternating GAN update, written in pre-0.4 PyTorch idioms: Variable buffers
# mutated via .data.resize_()/fill_(), and scalar losses read as loss.data[0].
for epoch in range(niter):
    for i, data in enumerate(dataloader, 0):
        ############################
        # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
        ###########################
        # train with real (data)
        net_D.zero_grad()
        real, _ = data
        # Copy the real batch into the pre-allocated (possibly CUDA) buffer.
        input.data.resize_(real.size()).copy_(real)
        label.data.resize_(bs).fill_(real_label)

        output = net_D(input)
        errD_real = criteion(output, label)
        errD_real.backward()
        D_x = output.data.mean()  # mean D(x); near 1 = D confident reals are real

        #train with fake (generated)
        noise.data.resize_(bs, 100, 1, 1)
        noise.data.normal_(0, 1)  # fresh z ~ N(0, 1) each iteration
        fake = net_G(noise)
        label.data.fill_(fake_label)
        # detach(): block gradients into G so this backward only trains D.
        output = net_D(fake.detach())
        errD_fake = criteion(output, label)
        errD_fake.backward()
        D_G_z1 = output.data.mean()  # mean D(G(z)) before the D step

        # Gradients from both backward() calls accumulated; apply them at once.
        errD = errD_real + errD_fake
        optimizerD.step()

        ############################
        # (2) Update G network: maximize log(D(G(z)))
        ###########################
        net_G.zero_grad()
        # Non-saturating G loss: label fakes as real and minimise BCE.
        label.data.fill_(real_label)
        output = net_D(fake)
        errG = criteion(output, label)
        errG.backward()
        D_G_z2 = output.data.mean()  # mean D(G(z)) after the D step
        optimizerG.step()
        if i % 100 == 0:
            print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f'
                 % (epoch, niter, i, len(dataloader),
                   errD.data[0], errG.data[0],  D_x, D_G_z1, D_G_z2))
    if epoch % 10 == 0:
        # NOTE(review): assumes a 'results' directory already exists —
        # save_image will raise otherwise; os.makedirs('results', exist_ok=True)
        # beforehand would make the notebook re-runnable from scratch.
        fake = net_G(fixed_noise)
        vutils.save_image(fake.data, '%s/fake_samples_epoch_%03d.png'
                              % ('results', epoch),normalize=True)
[0/4000][0/600] Loss_D: 1.3774 Loss_G: 0.7734 D(x): 0.4849 D(G(z)): 0.4798 / 0.4614
[0/4000][100/600] Loss_D: 0.4435 Loss_G: 2.5407 D(x): 0.7520 D(G(z)): 0.1298 / 0.0979
[0/4000][200/600] Loss_D: 0.3307 Loss_G: 2.3395 D(x): 0.8486 D(G(z)): 0.1452 / 0.0966
[0/4000][300/600] Loss_D: 0.2424 Loss_G: 3.3446 D(x): 0.8489 D(G(z)): 0.0564 / 0.0354
[0/4000][400/600] Loss_D: 0.0658 Loss_G: 4.1486 D(x): 0.9686 D(G(z)): 0.0284 / 0.0162
[0/4000][500/600] Loss_D: 0.0548 Loss_G: 4.4317 D(x): 0.9709 D(G(z)): 0.0225 / 0.0120
[1/4000][0/600] Loss_D: 0.0952 Loss_G: 4.8037 D(x): 0.9463 D(G(z)): 0.0218 / 0.0084
[1/4000][100/600] Loss_D: 0.0744 Loss_G: 5.2767 D(x): 0.9725 D(G(z)): 0.0189 / 0.0054
[1/4000][200/600] Loss_D: 0.1847 Loss_G: 4.5582 D(x): 0.9241 D(G(z)): 0.0299 / 0.0110
[1/4000][300/600] Loss_D: 0.0385 Loss_G: 4.9896 D(x): 0.9814 D(G(z)): 0.0149 / 0.0069
[1/4000][400/600] Loss_D: 0.0217 Loss_G: 4.8109 D(x): 0.9934 D(G(z)): 0.0147 / 0.0084
[1/4000][500/600] Loss_D: 0.0118 Loss_G: 5.6721 D(x): 0.9940 D(G(z)): 0.0055 / 0.0035
[2/4000][0/600] Loss_D: 0.0204 Loss_G: 5.1875 D(x): 0.9904 D(G(z)): 0.0105 / 0.0058
[2/4000][100/600] Loss_D: 0.0306 Loss_G: 5.2118 D(x): 0.9855 D(G(z)): 0.0108 / 0.0056
[2/4000][200/600] Loss_D: 0.0172 Loss_G: 5.6072 D(x): 0.9932 D(G(z)): 0.0102 / 0.0040
[2/4000][300/600] Loss_D: 0.0056 Loss_G: 6.3004 D(x): 0.9978 D(G(z)): 0.0034 / 0.0019
[2/4000][400/600] Loss_D: 0.0148 Loss_G: 5.9962 D(x): 0.9915 D(G(z)): 0.0038 / 0.0025
[2/4000][500/600] Loss_D: 0.0046 Loss_G: 6.5175 D(x): 0.9978 D(G(z)): 0.0023 / 0.0015
[3/4000][0/600] Loss_D: 0.0075 Loss_G: 6.4266 D(x): 0.9955 D(G(z)): 0.0025 / 0.0016
[3/4000][100/600] Loss_D: 0.0039 Loss_G: 6.5081 D(x): 0.9984 D(G(z)): 0.0022 / 0.0015
[3/4000][200/600] Loss_D: 0.0193 Loss_G: 6.0765 D(x): 0.9865 D(G(z)): 0.0030 / 0.0024
[3/4000][300/600] Loss_D: 0.0052 Loss_G: 6.4796 D(x): 0.9973 D(G(z)): 0.0023 / 0.0015
[3/4000][400/600] Loss_D: 0.0185 Loss_G: 5.8766 D(x): 0.9876 D(G(z)): 0.0043 / 0.0031
[3/4000][500/600] Loss_D: 0.0157 Loss_G: 5.1374 D(x): 0.9927 D(G(z)): 0.0082 / 0.0061
[4/4000][0/600] Loss_D: 0.0199 Loss_G: 5.7099 D(x): 0.9906 D(G(z)): 0.0101 / 0.0035
[4/4000][100/600] Loss_D: 0.0237 Loss_G: 5.1928 D(x): 0.9912 D(G(z)): 0.0111 / 0.0057
[4/4000][200/600] Loss_D: 0.0082 Loss_G: 6.1906 D(x): 0.9980 D(G(z)): 0.0061 / 0.0021
[4/4000][300/600] Loss_D: 0.0095 Loss_G: 5.6986 D(x): 0.9988 D(G(z)): 0.0083 / 0.0035
[4/4000][400/600] Loss_D: 0.0070 Loss_G: 7.1436 D(x): 0.9950 D(G(z)): 0.0017 / 0.0008
[4/4000][500/600] Loss_D: 0.0188 Loss_G: 6.3601 D(x): 0.9910 D(G(z)): 0.0027 / 0.0018
[5/4000][0/600] Loss_D: 0.0034 Loss_G: 6.8444 D(x): 0.9999 D(G(z)): 0.0033 / 0.0011
[5/4000][100/600] Loss_D: 0.0163 Loss_G: 5.5472 D(x): 0.9920 D(G(z)): 0.0064 / 0.0041
[5/4000][200/600] Loss_D: 0.0048 Loss_G: 6.2294 D(x): 0.9989 D(G(z)): 0.0037 / 0.0020
[5/4000][300/600] Loss_D: 0.0023 Loss_G: 6.9944 D(x): 0.9995 D(G(z)): 0.0017 / 0.0010
[5/4000][400/600] Loss_D: 0.0074 Loss_G: 6.3733 D(x): 0.9963 D(G(z)): 0.0031 / 0.0018
[5/4000][500/600] Loss_D: 0.0092 Loss_G: 7.1238 D(x): 0.9941 D(G(z)): 0.0015 / 0.0009
[6/4000][0/600] Loss_D: 0.0050 Loss_G: 5.8230 D(x): 0.9998 D(G(z)): 0.0048 / 0.0031
[6/4000][100/600] Loss_D: 0.0041 Loss_G: 6.4950 D(x): 0.9984 D(G(z)): 0.0025 / 0.0016
[6/4000][200/600] Loss_D: 0.0088 Loss_G: 6.4044 D(x): 0.9951 D(G(z)): 0.0030 / 0.0019
[6/4000][300/600] Loss_D: 0.0058 Loss_G: 6.5578 D(x): 0.9978 D(G(z)): 0.0035 / 0.0018
[6/4000][400/600] Loss_D: 0.0098 Loss_G: 5.1840 D(x): 0.9994 D(G(z)): 0.0092 / 0.0059
[6/4000][500/600] Loss_D: 0.0021 Loss_G: 7.2978 D(x): 0.9998 D(G(z)): 0.0019 / 0.0009
[7/4000][0/600] Loss_D: 0.0078 Loss_G: 6.1279 D(x): 0.9993 D(G(z)): 0.0070 / 0.0035
[7/4000][100/600] Loss_D: 0.0033 Loss_G: 6.5352 D(x): 0.9996 D(G(z)): 0.0028 / 0.0016
[7/4000][200/600] Loss_D: 0.0034 Loss_G: 6.2562 D(x): 1.0000 D(G(z)): 0.0034 / 0.0020
[7/4000][300/600] Loss_D: 0.0053 Loss_G: 6.3146 D(x): 0.9994 D(G(z)): 0.0047 / 0.0023
[7/4000][400/600] Loss_D: 0.0070 Loss_G: 5.8363 D(x): 0.9998 D(G(z)): 0.0067 / 0.0032
[7/4000][500/600] Loss_D: 0.0056 Loss_G: 6.0468 D(x): 0.9996 D(G(z)): 0.0052 / 0.0025
[8/4000][0/600] Loss_D: 0.0159 Loss_G: 6.1049 D(x): 0.9923 D(G(z)): 0.0052 / 0.0028
[8/4000][100/600] Loss_D: 0.0117 Loss_G: 6.1046 D(x): 0.9945 D(G(z)): 0.0038 / 0.0028
[8/4000][200/600] Loss_D: 0.0063 Loss_G: 6.3023 D(x): 0.9990 D(G(z)): 0.0052 / 0.0023
[8/4000][300/600] Loss_D: 0.0017 Loss_G: 7.2443 D(x): 0.9998 D(G(z)): 0.0015 / 0.0008
[8/4000][400/600] Loss_D: 0.0030 Loss_G: 6.5861 D(x): 0.9998 D(G(z)): 0.0028 / 0.0016
[8/4000][500/600] Loss_D: 0.0029 Loss_G: 6.8881 D(x): 0.9999 D(G(z)): 0.0029 / 0.0014
[9/4000][0/600] Loss_D: 0.0011 Loss_G: 8.4126 D(x): 0.9997 D(G(z)): 0.0008 / 0.0003
[9/4000][100/600] Loss_D: 0.0011 Loss_G: 8.7539 D(x): 1.0000 D(G(z)): 0.0010 / 0.0005
[9/4000][200/600] Loss_D: 0.0031 Loss_G: 8.1306 D(x): 0.9979 D(G(z)): 0.0008 / 0.0004
[9/4000][300/600] Loss_D: 0.0039 Loss_G: 6.9073 D(x): 0.9999 D(G(z)): 0.0038 / 0.0018
[9/4000][400/600] Loss_D: 0.0033 Loss_G: 6.6056 D(x): 0.9996 D(G(z)): 0.0029 / 0.0019
[9/4000][500/600] Loss_D: 0.0069 Loss_G: 7.0539 D(x): 0.9961 D(G(z)): 0.0021 / 0.0012
[10/4000][0/600] Loss_D: 0.0059 Loss_G: 7.3825 D(x): 0.9974 D(G(z)): 0.0032 / 0.0013
[10/4000][100/600] Loss_D: 0.0028 Loss_G: 6.9515 D(x): 1.0000 D(G(z)): 0.0028 / 0.0014
[10/4000][200/600] Loss_D: 0.0033 Loss_G: 7.7476 D(x): 0.9989 D(G(z)): 0.0021 / 0.0011
[10/4000][300/600] Loss_D: 0.0025 Loss_G: 7.2733 D(x): 1.0000 D(G(z)): 0.0025 / 0.0010
[10/4000][400/600] Loss_D: 0.0023 Loss_G: 6.5245 D(x): 0.9999 D(G(z)): 0.0021 / 0.0018
[10/4000][500/600] Loss_D: 0.0011 Loss_G: 7.4187 D(x): 1.0000 D(G(z)): 0.0011 / 0.0008
[11/4000][0/600] Loss_D: 0.0027 Loss_G: 7.9910 D(x): 0.9999 D(G(z)): 0.0026 / 0.0006
[11/4000][100/600] Loss_D: 0.0037 Loss_G: 7.8381 D(x): 0.9983 D(G(z)): 0.0020 / 0.0007
[11/4000][200/600] Loss_D: 0.0015 Loss_G: 8.4313 D(x): 1.0000 D(G(z)): 0.0014 / 0.0003
[11/4000][300/600] Loss_D: 0.0041 Loss_G: 8.3181 D(x): 0.9971 D(G(z)): 0.0008 / 0.0005
[11/4000][400/600] Loss_D: 0.0018 Loss_G: 9.2507 D(x): 0.9996 D(G(z)): 0.0014 / 0.0004
[11/4000][500/600] Loss_D: 0.0001 Loss_G: 10.4236 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[12/4000][0/600] Loss_D: 0.0001 Loss_G: 11.2132 D(x): 1.0000 D(G(z)): 0.0001 / 0.0000
[12/4000][100/600] Loss_D: 0.0049 Loss_G: 8.5954 D(x): 0.9963 D(G(z)): 0.0007 / 0.0004
[12/4000][200/600] Loss_D: 0.0010 Loss_G: 8.2567 D(x): 1.0000 D(G(z)): 0.0010 / 0.0004
[12/4000][300/600] Loss_D: 0.0016 Loss_G: 7.6023 D(x): 0.9999 D(G(z)): 0.0015 / 0.0007
[12/4000][400/600] Loss_D: 0.0114 Loss_G: 8.9167 D(x): 0.9933 D(G(z)): 0.0003 / 0.0002
[12/4000][500/600] Loss_D: 0.0027 Loss_G: 8.0427 D(x): 0.9985 D(G(z)): 0.0012 / 0.0010
[13/4000][0/600] Loss_D: 0.0014 Loss_G: 7.9407 D(x): 1.0000 D(G(z)): 0.0014 / 0.0007
[13/4000][100/600] Loss_D: 0.0000 Loss_G: 12.8441 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[13/4000][200/600] Loss_D: 0.0033 Loss_G: 7.0721 D(x): 1.0000 D(G(z)): 0.0033 / 0.0011
[13/4000][300/600] Loss_D: 0.0679 Loss_G: 7.9982 D(x): 0.9801 D(G(z)): 0.0003 / 0.0005
[13/4000][400/600] Loss_D: 0.0016 Loss_G: 9.0502 D(x): 0.9998 D(G(z)): 0.0013 / 0.0003
[13/4000][500/600] Loss_D: 0.0007 Loss_G: 9.3152 D(x): 0.9999 D(G(z)): 0.0006 / 0.0002
[14/4000][0/600] Loss_D: 0.0058 Loss_G: 8.0410 D(x): 0.9996 D(G(z)): 0.0053 / 0.0023
[14/4000][100/600] Loss_D: 0.0023 Loss_G: 8.0422 D(x): 1.0000 D(G(z)): 0.0023 / 0.0010
[14/4000][200/600] Loss_D: 0.0003 Loss_G: 9.9924 D(x): 1.0000 D(G(z)): 0.0003 / 0.0001
[14/4000][300/600] Loss_D: 0.0242 Loss_G: 11.0997 D(x): 0.9908 D(G(z)): 0.0001 / 0.0001
[14/4000][400/600] Loss_D: 0.0002 Loss_G: 11.5158 D(x): 0.9999 D(G(z)): 0.0001 / 0.0000
[14/4000][500/600] Loss_D: 0.0050 Loss_G: 9.2287 D(x): 0.9965 D(G(z)): 0.0007 / 0.0002
[15/4000][0/600] Loss_D: 0.0001 Loss_G: 12.9008 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[15/4000][100/600] Loss_D: 0.0013 Loss_G: 8.3123 D(x): 1.0000 D(G(z)): 0.0013 / 0.0004
[15/4000][200/600] Loss_D: 0.0010 Loss_G: 9.0883 D(x): 1.0000 D(G(z)): 0.0010 / 0.0004
[15/4000][300/600] Loss_D: 0.0009 Loss_G: 9.3353 D(x): 0.9999 D(G(z)): 0.0008 / 0.0003
[15/4000][400/600] Loss_D: 0.0020 Loss_G: 8.5247 D(x): 0.9988 D(G(z)): 0.0008 / 0.0005
[15/4000][500/600] Loss_D: 0.0011 Loss_G: 9.5346 D(x): 0.9995 D(G(z)): 0.0006 / 0.0003
[16/4000][0/600] Loss_D: 0.0010 Loss_G: 8.5918 D(x): 0.9995 D(G(z)): 0.0004 / 0.0003
[16/4000][100/600] Loss_D: 0.0002 Loss_G: 9.4420 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[16/4000][200/600] Loss_D: 0.0243 Loss_G: 8.0666 D(x): 0.9909 D(G(z)): 0.0009 / 0.0006
[16/4000][300/600] Loss_D: 0.0016 Loss_G: 8.6173 D(x): 0.9999 D(G(z)): 0.0014 / 0.0013
[16/4000][400/600] Loss_D: 0.0006 Loss_G: 9.5689 D(x): 0.9999 D(G(z)): 0.0006 / 0.0004
[16/4000][500/600] Loss_D: 0.0007 Loss_G: 8.7949 D(x): 1.0000 D(G(z)): 0.0007 / 0.0003
[17/4000][0/600] Loss_D: 0.0001 Loss_G: 11.7579 D(x): 1.0000 D(G(z)): 0.0001 / 0.0000
[17/4000][100/600] Loss_D: 0.0020 Loss_G: 7.7364 D(x): 1.0000 D(G(z)): 0.0020 / 0.0012
[17/4000][200/600] Loss_D: 0.0007 Loss_G: 8.1546 D(x): 1.0000 D(G(z)): 0.0007 / 0.0004
[17/4000][300/600] Loss_D: 0.0003 Loss_G: 9.7369 D(x): 1.0000 D(G(z)): 0.0003 / 0.0002
[17/4000][400/600] Loss_D: 0.0001 Loss_G: 10.2959 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[17/4000][500/600] Loss_D: 0.0041 Loss_G: 8.0294 D(x): 0.9979 D(G(z)): 0.0017 / 0.0011
[18/4000][0/600] Loss_D: 0.0004 Loss_G: 11.8662 D(x): 1.0000 D(G(z)): 0.0003 / 0.0001
[18/4000][100/600] Loss_D: 0.0008 Loss_G: 9.1134 D(x): 1.0000 D(G(z)): 0.0008 / 0.0003
[18/4000][200/600] Loss_D: 0.0005 Loss_G: 9.1882 D(x): 1.0000 D(G(z)): 0.0005 / 0.0003
[18/4000][300/600] Loss_D: 0.0237 Loss_G: 6.4524 D(x): 0.9881 D(G(z)): 0.0021 / 0.0029
[18/4000][400/600] Loss_D: 0.0000 Loss_G: 10.4207 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[18/4000][500/600] Loss_D: 0.0002 Loss_G: 9.9491 D(x): 1.0000 D(G(z)): 0.0002 / 0.0001
[19/4000][0/600] Loss_D: 0.0001 Loss_G: 10.3360 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[19/4000][100/600] Loss_D: 0.0000 Loss_G: 10.7868 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[19/4000][200/600] Loss_D: 0.0003 Loss_G: 8.3704 D(x): 1.0000 D(G(z)): 0.0003 / 0.0003
[19/4000][300/600] Loss_D: 0.0000 Loss_G: 14.6361 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[19/4000][400/600] Loss_D: 0.0000 Loss_G: 10.7809 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[19/4000][500/600] Loss_D: 0.0001 Loss_G: 9.5596 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[20/4000][0/600] Loss_D: 0.0009 Loss_G: 9.2467 D(x): 0.9994 D(G(z)): 0.0002 / 0.0002
[20/4000][100/600] Loss_D: 0.0004 Loss_G: 8.6426 D(x): 1.0000 D(G(z)): 0.0004 / 0.0003
[20/4000][200/600] Loss_D: 0.0001 Loss_G: 10.1458 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[20/4000][300/600] Loss_D: 0.0002 Loss_G: 9.2968 D(x): 1.0000 D(G(z)): 0.0002 / 0.0002
[20/4000][400/600] Loss_D: 0.0000 Loss_G: 11.1382 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[20/4000][500/600] Loss_D: 0.0000 Loss_G: 10.1853 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[21/4000][0/600] Loss_D: 0.0000 Loss_G: 16.9898 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[21/4000][100/600] Loss_D: 0.0000 Loss_G: 13.7650 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[21/4000][200/600] Loss_D: 0.0001 Loss_G: 12.4018 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[21/4000][300/600] Loss_D: 0.0000 Loss_G: 20.8501 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[21/4000][400/600] Loss_D: 0.0000 Loss_G: 23.9255 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[21/4000][500/600] Loss_D: 0.0000 Loss_G: 14.2179 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[22/4000][0/600] Loss_D: 0.0000 Loss_G: 12.9464 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[22/4000][100/600] Loss_D: 0.0003 Loss_G: 13.3762 D(x): 0.9997 D(G(z)): 0.0000 / 0.0000
[22/4000][200/600] Loss_D: 0.0000 Loss_G: 12.7037 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[22/4000][300/600] Loss_D: 0.0000 Loss_G: 16.6426 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[22/4000][400/600] Loss_D: 0.0001 Loss_G: 10.5659 D(x): 1.0000 D(G(z)): 0.0001 / 0.0000
[22/4000][500/600] Loss_D: 0.0000 Loss_G: 12.8432 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[23/4000][0/600] Loss_D: 0.0001 Loss_G: 11.1291 D(x): 1.0000 D(G(z)): 0.0001 / 0.0000
[23/4000][100/600] Loss_D: 0.0001 Loss_G: 12.6549 D(x): 1.0000 D(G(z)): 0.0001 / 0.0000
[23/4000][200/600] Loss_D: 0.0002 Loss_G: 12.4237 D(x): 1.0000 D(G(z)): 0.0002 / 0.0000
[23/4000][300/600] Loss_D: 0.0001 Loss_G: 12.5987 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[23/4000][400/600] Loss_D: 0.0000 Loss_G: 13.7597 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[23/4000][500/600] Loss_D: 0.0004 Loss_G: 9.3521 D(x): 1.0000 D(G(z)): 0.0004 / 0.0003
[24/4000][0/600] Loss_D: 0.0000 Loss_G: 15.1716 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[24/4000][100/600] Loss_D: 0.0000 Loss_G: 14.3250 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[24/4000][200/600] Loss_D: 0.0006 Loss_G: 11.6737 D(x): 0.9994 D(G(z)): 0.0000 / 0.0000
[24/4000][300/600] Loss_D: 0.0000 Loss_G: 17.0129 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[24/4000][400/600] Loss_D: 0.0000 Loss_G: 12.9673 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[24/4000][500/600] Loss_D: 0.0000 Loss_G: 10.6216 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[25/4000][0/600] Loss_D: 0.0000 Loss_G: 12.4660 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[25/4000][100/600] Loss_D: 0.0000 Loss_G: 10.8966 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[25/4000][200/600] Loss_D: 0.0000 Loss_G: 11.2991 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[25/4000][300/600] Loss_D: 0.0001 Loss_G: 27.3804 D(x): 0.9999 D(G(z)): 0.0000 / 0.0000
[25/4000][400/600] Loss_D: 0.0000 Loss_G: 24.6402 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[25/4000][500/600] Loss_D: 0.0003 Loss_G: 10.5355 D(x): 1.0000 D(G(z)): 0.0003 / 0.0002
[26/4000][0/600] Loss_D: 0.0000 Loss_G: 13.4039 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[26/4000][100/600] Loss_D: 0.0006 Loss_G: 10.4117 D(x): 1.0000 D(G(z)): 0.0006 / 0.0002
[26/4000][200/600] Loss_D: 0.0001 Loss_G: 11.0890 D(x): 1.0000 D(G(z)): 0.0001 / 0.0000
[26/4000][300/600] Loss_D: 0.0000 Loss_G: 14.1428 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[26/4000][400/600] Loss_D: 0.0000 Loss_G: 12.7146 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[26/4000][500/600] Loss_D: 0.0000 Loss_G: 11.7097 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[27/4000][0/600] Loss_D: 0.0000 Loss_G: 12.2979 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[27/4000][100/600] Loss_D: 0.0000 Loss_G: 11.8428 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[27/4000][200/600] Loss_D: 0.0000 Loss_G: 12.4924 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[27/4000][300/600] Loss_D: 0.0000 Loss_G: 15.4330 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[27/4000][400/600] Loss_D: 0.0000 Loss_G: 21.4762 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[27/4000][500/600] Loss_D: 0.0000 Loss_G: 11.2736 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[28/4000][0/600] Loss_D: 0.0003 Loss_G: 12.4605 D(x): 1.0000 D(G(z)): 0.0003 / 0.0000
[28/4000][100/600] Loss_D: 0.0000 Loss_G: 18.0769 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[28/4000][200/600] Loss_D: 0.0000 Loss_G: 11.5384 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[28/4000][300/600] Loss_D: 0.0000 Loss_G: 12.3727 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[28/4000][400/600] Loss_D: 0.0000 Loss_G: 11.7965 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[28/4000][500/600] Loss_D: 0.0000 Loss_G: 12.5763 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[29/4000][0/600] Loss_D: 0.0000 Loss_G: 12.1662 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[29/4000][100/600] Loss_D: 0.0000 Loss_G: 27.6310 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[29/4000][200/600] Loss_D: 0.0000 Loss_G: 20.2965 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[29/4000][300/600] Loss_D: 0.0002 Loss_G: 9.8546 D(x): 1.0000 D(G(z)): 0.0002 / 0.0001
[29/4000][400/600] Loss_D: 0.0001 Loss_G: 10.0839 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[29/4000][500/600] Loss_D: 0.0000 Loss_G: 13.5178 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[30/4000][0/600] Loss_D: 0.0000 Loss_G: 11.9531 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[30/4000][100/600] Loss_D: 0.0000 Loss_G: 10.9716 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[30/4000][200/600] Loss_D: 0.0000 Loss_G: 11.2880 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[30/4000][300/600] Loss_D: 0.0001 Loss_G: 9.5145 D(x): 1.0000 D(G(z)): 0.0001 / 0.0002
[30/4000][400/600] Loss_D: 0.0000 Loss_G: 11.8010 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[30/4000][500/600] Loss_D: 0.0000 Loss_G: 13.8753 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[31/4000][0/600] Loss_D: 0.0000 Loss_G: 12.8222 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[31/4000][100/600] Loss_D: 0.0001 Loss_G: 11.7067 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[31/4000][200/600] Loss_D: 0.0000 Loss_G: 13.9095 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[31/4000][300/600] Loss_D: 0.0000 Loss_G: 12.7524 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[31/4000][400/600] Loss_D: 0.0000 Loss_G: 13.3206 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[31/4000][500/600] Loss_D: 0.0001 Loss_G: 10.8222 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[32/4000][0/600] Loss_D: 0.0000 Loss_G: 15.3445 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[32/4000][100/600] Loss_D: 0.0000 Loss_G: 13.4979 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[32/4000][200/600] Loss_D: 0.0000 Loss_G: 21.0071 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[32/4000][300/600] Loss_D: 0.0001 Loss_G: 10.7708 D(x): 1.0000 D(G(z)): 0.0001 / 0.0001
[32/4000][400/600] Loss_D: 0.0000 Loss_G: 12.5390 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-24-b08d64d40419> in <module>()
     35         output = net_D(fake)
     36         errG = criteion(output, label)
---> 37         errG.backward()
     38         D_G_z2 = output.data.mean()
     39         optimizerG.step()

~/anaconda3/lib/python3.6/site-packages/torch/autograd/variable.py in backward(self, gradient, retain_variables)
    144                     'or with gradient w.r.t. the variable')
    145             gradient = self.data.new().resize_as_(self.data).fill_(1)
--> 146         self._execution_engine.run_backward((self,), (gradient,), retain_variables)
    147 
    148     def register_hook(self, hook):

KeyboardInterrupt: 

ぜんぜんダメじゃん!

In [28]:
# Generate samples from the fixed noise with the (partially trained) generator
# and save the first 64 images as one grid PNG.
fake = net_G(fixed_noise)
vutils.save_image(fake.data[:64], '%s/fake_samples.png' % 'results' ,normalize=True)
In [30]:
# Reload the saved sample grid from disk and display it inline.
from PIL import Image
im = Image.open("results/fake_samples.png", "r")
plt.imshow(np.array(im))
Out[30]:
<matplotlib.image.AxesImage at 0x7fea82c9b0b8>