Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
Sebastian Raschka CPython 3.7.1 IPython 7.2.0 torch 1.0.0
A convolutional autoencoder that uses deconvolutional (transposed convolution) layers in the decoder, compressing the 784-pixel (28x28) MNIST images down to a 7x7x8 (392-value) representation.
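For reference, the bottleneck size works out to a 2x compression (simple arithmetic, added here for clarity):

input_size = 28 * 28     # 784 pixels per MNIST image
latent_size = 7 * 7 * 8  # 392 values in the 7x7x8 bottleneck
print(input_size / latent_size)  # 2.0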
import time
import numpy as np
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import datasets
from torchvision import transforms
if torch.cuda.is_available():
    torch.backends.cudnn.deterministic = True
##########################
### SETTINGS
##########################
# Device
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print('Device:', device)
# Hyperparameters
random_seed = 456
learning_rate = 0.005
num_epochs = 10
batch_size = 128
##########################
### MNIST DATASET
##########################
# Note transforms.ToTensor() scales input images
# to 0-1 range
train_dataset = datasets.MNIST(root='data',
                               train=True,
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data',
                              train=False,
                              transform=transforms.ToTensor())

train_loader = DataLoader(dataset=train_dataset,
                          batch_size=batch_size,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset,
                         batch_size=batch_size,
                         shuffle=False)
# Checking the dataset
for images, labels in train_loader:
    print('Image batch dimensions:', images.shape)
    print('Image label dimensions:', labels.shape)
    break
Device: cuda:0
Image batch dimensions: torch.Size([128, 1, 28, 28])
Image label dimensions: torch.Size([128])
##########################
### MODEL
##########################
class ConvolutionalAutoencoder(torch.nn.Module):

    def __init__(self):
        super(ConvolutionalAutoencoder, self).__init__()

        # calculate same padding:
        # (w - k + 2*p)/s + 1 = o
        # => p = (s(o-1) - w + k)/2

        ### ENCODER

        # 28x28x1 => 28x28x4
        self.conv_1 = torch.nn.Conv2d(in_channels=1,
                                      out_channels=4,
                                      kernel_size=(3, 3),
                                      stride=(1, 1),
                                      # (1(28-1) - 28 + 3) / 2 = 1
                                      padding=1)
        # 28x28x4 => 14x14x4
        self.pool_1 = torch.nn.MaxPool2d(kernel_size=(2, 2),
                                         stride=(2, 2),
                                         # (2(14-1) - 28 + 2) / 2 = 0
                                         padding=0)
        # 14x14x4 => 14x14x8
        self.conv_2 = torch.nn.Conv2d(in_channels=4,
                                      out_channels=8,
                                      kernel_size=(3, 3),
                                      stride=(1, 1),
                                      # (1(14-1) - 14 + 3) / 2 = 1
                                      padding=1)
        # 14x14x8 => 7x7x8
        self.pool_2 = torch.nn.MaxPool2d(kernel_size=(2, 2),
                                         stride=(2, 2),
                                         # (2(7-1) - 14 + 2) / 2 = 0
                                         padding=0)

        ### DECODER
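        # Added note: a transposed convolution with stride s, kernel k,
        # and padding p (output_padding=0) produces an output width of
        #     o = (w - 1)*s - 2*p + k
        # so deconv_1: (7-1)*2 - 0 + 3 = 15, and deconv_2: (15-1)*2 - 0 + 3 = 31.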
        # 7x7x8 => 15x15x4
        self.deconv_1 = torch.nn.ConvTranspose2d(in_channels=8,
                                                 out_channels=4,
                                                 kernel_size=(3, 3),
                                                 stride=(2, 2),
                                                 padding=0)
        # 15x15x4 => 31x31x1
        self.deconv_2 = torch.nn.ConvTranspose2d(in_channels=4,
                                                 out_channels=1,
                                                 kernel_size=(3, 3),
                                                 stride=(2, 2),
                                                 padding=0)
    def forward(self, x):

        ### ENCODER
        x = self.conv_1(x)
        x = F.leaky_relu(x)
        x = self.pool_1(x)
        x = self.conv_2(x)
        x = F.leaky_relu(x)
        x = self.pool_2(x)

        ### DECODER
        x = self.deconv_1(x)
        x = F.leaky_relu(x)
        x = self.deconv_2(x)
        x = F.leaky_relu(x)
        # crop the 31x31 decoder output back to the 28x28 input size
        logits = x[:, :, 2:30, 2:30]
        probas = torch.sigmoid(logits)
        return logits, probas
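As a quick sanity check (a sketch added here, not part of the original notebook), a random batch can be pushed through an untrained instance to confirm that the center crop in forward() restores the 28x28 input size:

with torch.no_grad():
    dummy = torch.rand(2, 1, 28, 28)  # batch of two fake grayscale images
    out_logits, out_probas = ConvolutionalAutoencoder()(dummy)
print(out_logits.shape, out_probas.shape)  # both: torch.Size([2, 1, 28, 28])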
torch.manual_seed(random_seed)
model = ConvolutionalAutoencoder()
model = model.to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
start_time = time.time()
for epoch in range(num_epochs):
    for batch_idx, (features, targets) in enumerate(train_loader):

        # don't need labels, only the images (features)
        features = features.to(device)

        ### FORWARD AND BACK PROP
        logits, decoded = model(features)
        cost = F.binary_cross_entropy_with_logits(logits, features)
        optimizer.zero_grad()
        cost.backward()

        ### UPDATE MODEL PARAMETERS
        optimizer.step()

        ### LOGGING
        if not batch_idx % 50:
            print('Epoch: %03d/%03d | Batch %03d/%03d | Cost: %.4f'
                  % (epoch+1, num_epochs, batch_idx,
                     len(train_dataset)//batch_size, cost))

    print('Time elapsed: %.2f min' % ((time.time() - start_time)/60))

print('Total Training Time: %.2f min' % ((time.time() - start_time)/60))
Epoch: 001/010 | Batch 000/468 | Cost: 0.7191
Epoch: 001/010 | Batch 050/468 | Cost: 0.6911
Epoch: 001/010 | Batch 100/468 | Cost: 0.5994
Epoch: 001/010 | Batch 150/468 | Cost: 0.3594
Epoch: 001/010 | Batch 200/468 | Cost: 0.3035
Epoch: 001/010 | Batch 250/468 | Cost: 0.2480
Epoch: 001/010 | Batch 300/468 | Cost: 0.2085
Epoch: 001/010 | Batch 350/468 | Cost: 0.1780
Epoch: 001/010 | Batch 400/468 | Cost: 0.1586
Epoch: 001/010 | Batch 450/468 | Cost: 0.1532
Epoch: 002/010 | Batch 000/468 | Cost: 0.1442
Epoch: 002/010 | Batch 050/468 | Cost: 0.1366
Epoch: 002/010 | Batch 100/468 | Cost: 0.1362
Epoch: 002/010 | Batch 150/468 | Cost: 0.1293
Epoch: 002/010 | Batch 200/468 | Cost: 0.1286
Epoch: 002/010 | Batch 250/468 | Cost: 0.1244
Epoch: 002/010 | Batch 300/468 | Cost: 0.1226
Epoch: 002/010 | Batch 350/468 | Cost: 0.1240
Epoch: 002/010 | Batch 400/468 | Cost: 0.1239
Epoch: 002/010 | Batch 450/468 | Cost: 0.1193
Epoch: 003/010 | Batch 000/468 | Cost: 0.1196
Epoch: 003/010 | Batch 050/468 | Cost: 0.1197
Epoch: 003/010 | Batch 100/468 | Cost: 0.1217
Epoch: 003/010 | Batch 150/468 | Cost: 0.1167
Epoch: 003/010 | Batch 200/468 | Cost: 0.1115
Epoch: 003/010 | Batch 250/468 | Cost: 0.1144
Epoch: 003/010 | Batch 300/468 | Cost: 0.1092
Epoch: 003/010 | Batch 350/468 | Cost: 0.1164
Epoch: 003/010 | Batch 400/468 | Cost: 0.1141
Epoch: 003/010 | Batch 450/468 | Cost: 0.1071
Epoch: 004/010 | Batch 000/468 | Cost: 0.1121
Epoch: 004/010 | Batch 050/468 | Cost: 0.1130
Epoch: 004/010 | Batch 100/468 | Cost: 0.1043
Epoch: 004/010 | Batch 150/468 | Cost: 0.1098
Epoch: 004/010 | Batch 200/468 | Cost: 0.1104
Epoch: 004/010 | Batch 250/468 | Cost: 0.1095
Epoch: 004/010 | Batch 300/468 | Cost: 0.1105
Epoch: 004/010 | Batch 350/468 | Cost: 0.1088
Epoch: 004/010 | Batch 400/468 | Cost: 0.1040
Epoch: 004/010 | Batch 450/468 | Cost: 0.1098
Epoch: 005/010 | Batch 000/468 | Cost: 0.1045
Epoch: 005/010 | Batch 050/468 | Cost: 0.1030
Epoch: 005/010 | Batch 100/468 | Cost: 0.1029
Epoch: 005/010 | Batch 150/468 | Cost: 0.1063
Epoch: 005/010 | Batch 200/468 | Cost: 0.1056
Epoch: 005/010 | Batch 250/468 | Cost: 0.1046
Epoch: 005/010 | Batch 300/468 | Cost: 0.1074
Epoch: 005/010 | Batch 350/468 | Cost: 0.1062
Epoch: 005/010 | Batch 400/468 | Cost: 0.1029
Epoch: 005/010 | Batch 450/468 | Cost: 0.1074
Epoch: 006/010 | Batch 000/468 | Cost: 0.1051
Epoch: 006/010 | Batch 050/468 | Cost: 0.0983
Epoch: 006/010 | Batch 100/468 | Cost: 0.1031
Epoch: 006/010 | Batch 150/468 | Cost: 0.1060
Epoch: 006/010 | Batch 200/468 | Cost: 0.1044
Epoch: 006/010 | Batch 250/468 | Cost: 0.1013
Epoch: 006/010 | Batch 300/468 | Cost: 0.0992
Epoch: 006/010 | Batch 350/468 | Cost: 0.1010
Epoch: 006/010 | Batch 400/468 | Cost: 0.1020
Epoch: 006/010 | Batch 450/468 | Cost: 0.1047
Epoch: 007/010 | Batch 000/468 | Cost: 0.0979
Epoch: 007/010 | Batch 050/468 | Cost: 0.0978
Epoch: 007/010 | Batch 100/468 | Cost: 0.1001
Epoch: 007/010 | Batch 150/468 | Cost: 0.1023
Epoch: 007/010 | Batch 200/468 | Cost: 0.1008
Epoch: 007/010 | Batch 250/468 | Cost: 0.0943
Epoch: 007/010 | Batch 300/468 | Cost: 0.0968
Epoch: 007/010 | Batch 350/468 | Cost: 0.1017
Epoch: 007/010 | Batch 400/468 | Cost: 0.0988
Epoch: 007/010 | Batch 450/468 | Cost: 0.0992
Epoch: 008/010 | Batch 000/468 | Cost: 0.1015
Epoch: 008/010 | Batch 050/468 | Cost: 0.0995
Epoch: 008/010 | Batch 100/468 | Cost: 0.0988
Epoch: 008/010 | Batch 150/468 | Cost: 0.0980
Epoch: 008/010 | Batch 200/468 | Cost: 0.0986
Epoch: 008/010 | Batch 250/468 | Cost: 0.0958
Epoch: 008/010 | Batch 300/468 | Cost: 0.0958
Epoch: 008/010 | Batch 350/468 | Cost: 0.0928
Epoch: 008/010 | Batch 400/468 | Cost: 0.0986
Epoch: 008/010 | Batch 450/468 | Cost: 0.0972
Epoch: 009/010 | Batch 000/468 | Cost: 0.0985
Epoch: 009/010 | Batch 050/468 | Cost: 0.0958
Epoch: 009/010 | Batch 100/468 | Cost: 0.1002
Epoch: 009/010 | Batch 150/468 | Cost: 0.0980
Epoch: 009/010 | Batch 200/468 | Cost: 0.0973
Epoch: 009/010 | Batch 250/468 | Cost: 0.0966
Epoch: 009/010 | Batch 300/468 | Cost: 0.0948
Epoch: 009/010 | Batch 350/468 | Cost: 0.0983
Epoch: 009/010 | Batch 400/468 | Cost: 0.0986
Epoch: 009/010 | Batch 450/468 | Cost: 0.0976
Epoch: 010/010 | Batch 000/468 | Cost: 0.0975
Epoch: 010/010 | Batch 050/468 | Cost: 0.0971
Epoch: 010/010 | Batch 100/468 | Cost: 0.0976
Epoch: 010/010 | Batch 150/468 | Cost: 0.0953
Epoch: 010/010 | Batch 200/468 | Cost: 0.0982
Epoch: 010/010 | Batch 250/468 | Cost: 0.0964
Epoch: 010/010 | Batch 300/468 | Cost: 0.1003
Epoch: 010/010 | Batch 350/468 | Cost: 0.0914
Epoch: 010/010 | Batch 400/468 | Cost: 0.0971
Epoch: 010/010 | Batch 450/468 | Cost: 0.0959
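The cost above is tracked on training batches only. As a rough check of generalization, the following minimal sketch (not part of the original notebook; it reuses model, test_loader, and device from above) computes the mean reconstruction cost over the test set. Separate variable names are used so the training batch in features stays available for the visualization below:

model.eval()
with torch.no_grad():
    total_cost, num_batches = 0., 0
    for test_features, _ in test_loader:
        test_features = test_features.to(device)
        test_logits, _ = model(test_features)
        total_cost += F.binary_cross_entropy_with_logits(
            test_logits, test_features).item()
        num_batches += 1
print('Mean test reconstruction cost: %.4f' % (total_cost / num_batches))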
%matplotlib inline
import matplotlib.pyplot as plt
##########################
### VISUALIZATION
##########################
n_images = 15
image_width = 28
fig, axes = plt.subplots(nrows=2, ncols=n_images,
                         sharex=True, sharey=True, figsize=(20, 2.5))
orig_images = features[:n_images]
decoded_images = decoded[:n_images]
for i in range(n_images):
    for ax, img in zip(axes, [orig_images, decoded_images]):
        curr_img = img[i].detach().to(torch.device('cpu'))
        ax[i].imshow(curr_img.view((image_width, image_width)), cmap='binary')
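The model class does not expose the 7x7x8 bottleneck directly; if the latent codes are needed (e.g., for downstream visualization or clustering), they can be recovered by reusing the trained encoder layers, as in this sketch (added here; not part of the original notebook):

with torch.no_grad():
    z = features[:n_images].to(device)  # reuse the last training batch
    z = model.pool_1(F.leaky_relu(model.conv_1(z)))
    encoded = model.pool_2(F.leaky_relu(model.conv_2(z)))
print('Latent shape:', encoded.shape)  # torch.Size([15, 8, 7, 7])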
%watermark -iv
numpy 1.15.4 torch 1.0.0