11_CNN_CIFAR-10_CUDA


In [90]:
# Pytorch Library
import torch
import torch.nn.init
import torchvision
from torch.autograd import Variable
import torchvision.utils as utils
import torchvision.datasets as dsets
import torchvision.transforms as transforms

########################################################################
# The output of torchvision datasets are PILImage images of range [0, 1].
# We transform them to Tensors of normalized range [-1, 1]

transform = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])

trainset = torchvision.datasets.CIFAR10(root='./data', train=True,
                                        download=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=128,
                                          shuffle=True, num_workers=10)

testset = torchvision.datasets.CIFAR10(root='./data', train=False,
                                       download=True, transform=transform)
testloader = torch.utils.data.DataLoader(testset, batch_size=4,
                                         shuffle=False, num_workers=10)

classes = ('plane', 'car', 'bird', 'cat',
           'deer', 'dog', 'frog', 'horse', 'ship', 'truck')


Files already downloaded and verified
Files already downloaded and verified

In [4]:
# Let us show some of the training images, for fun.

import matplotlib.pyplot as plt
import numpy as np

# functions to show an image


def imshow(img):
    img = img / 2 + 0.5     # unnormalize
    npimg = img.numpy()
    plt.imshow(np.transpose(npimg, (1, 2, 0)))
    plt.show()


# get some random training images
dataiter = iter(trainloader)
images, labels = dataiter.next()

# show images
imshow(torchvision.utils.make_grid(images))
# print labels
print(' '.join('%5s' % classes[labels[j]] for j in range(4)))


horse   dog  frog  frog

In [8]:
########################################################################
# 2. Define a Convolution Neural Network
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()

        # take 3-channel images (instead of 1-channel images as it was defined).
        self.conv1 = nn.Conv2d(3, 24, 5)
        self.b1 = nn.BatchNorm2d(24)
        self.pool = nn.MaxPool2d(2, 2)
        
        self.conv2 = nn.Conv2d(24, 64, 5)
        self.b2 = nn.BatchNorm2d(64)
        
        self.fc1 = nn.Linear(64 * 5 * 5, 240)
        self.fc2 = nn.Linear(240, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        x = self.pool(F.relu(self.b1(self.conv1(x))))
        x = self.pool(F.relu(self.b2(self.conv2(x))))
        x = x.view(-1, 64 * 5 * 5)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return x

net = Net()
net.load_state_dict(torch.load('CNN.pkl'))  # Load the Trained Model
net.cuda()


Out[8]:
Net (
  (conv1): Conv2d(3, 24, kernel_size=(5, 5), stride=(1, 1))
  (b1): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True)
  (pool): MaxPool2d (size=(2, 2), stride=(2, 2), dilation=(1, 1))
  (conv2): Conv2d(24, 64, kernel_size=(5, 5), stride=(1, 1))
  (b2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
  (fc1): Linear (1600 -> 240)
  (fc2): Linear (240 -> 84)
  (fc3): Linear (84 -> 10)
)

In [9]:
########################################################################
# 3. Define a Loss function and optimizer
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Let's use a Classification Cross-Entropy loss and SGD with momentum

import torch.optim as optim

criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(net.parameters())

In [21]:
########################################################################
# 4. Train the network
# ^^^^^^^^^^^^^^^^^^^^
#
# This is when things start to get interesting.
# We simply have to loop over our data iterator, and feed the inputs to the
# network and optimize

for epoch in range(250):  # loop over the dataset multiple times

    running_loss = 0.0
    for i, data in enumerate(trainloader, 0):
        # get the inputs
        inputs, labels = data

        # wrap them in Variable
        inputs, labels = Variable(inputs).cuda(), Variable(labels).cuda()

        # zero the parameter gradients
        optimizer.zero_grad()

        # forward + backward + optimize
        outputs = net(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()

        # print statistics
        running_loss += loss.data[0]
        if i % 128 == 127:    # print every 2000 mini-batches
            print('[%d, %5d] loss: %.3f' %
                  (epoch + 1, i + 1, running_loss / 128))
            running_loss = 0.0
            
    correct = 0
    total = 0
    for data in testloader:
        images, labels = data
        outputs = net(Variable(images).cuda())
        _, predicted = torch.max(outputs.data, 1)
        total += labels.size(0)
        correct += (predicted.cpu() == labels).sum()

    print('Accuracy of the network on the 10000 test images: %d %%' % (
        100 * correct / total))

print('Finished Training')

torch.save(net.state_dict(), 'CNN.pkl')  # Save the Model


[1,   128] loss: 0.007
[1,   256] loss: 0.010
[1,   384] loss: 0.012
Accuracy of the network on the 10000 test images: 66 %
[2,   128] loss: 0.007
[2,   256] loss: 0.005
[2,   384] loss: 0.013
Accuracy of the network on the 10000 test images: 66 %
[3,   128] loss: 0.008
[3,   256] loss: 0.008
[3,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 66 %
[4,   128] loss: 0.007
[4,   256] loss: 0.004
[4,   384] loss: 0.005
Accuracy of the network on the 10000 test images: 66 %
[5,   128] loss: 0.004
[5,   256] loss: 0.004
[5,   384] loss: 0.012
Accuracy of the network on the 10000 test images: 65 %
[6,   128] loss: 0.011
[6,   256] loss: 0.015
[6,   384] loss: 0.013
Accuracy of the network on the 10000 test images: 66 %
[7,   128] loss: 0.008
[7,   256] loss: 0.011
[7,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 66 %
[8,   128] loss: 0.009
[8,   256] loss: 0.006
[8,   384] loss: 0.004
Accuracy of the network on the 10000 test images: 66 %
[9,   128] loss: 0.002
[9,   256] loss: 0.005
[9,   384] loss: 0.005
Accuracy of the network on the 10000 test images: 66 %
[10,   128] loss: 0.014
[10,   256] loss: 0.011
[10,   384] loss: 0.016
Accuracy of the network on the 10000 test images: 66 %
[11,   128] loss: 0.013
[11,   256] loss: 0.012
[11,   384] loss: 0.010
Accuracy of the network on the 10000 test images: 66 %
[12,   128] loss: 0.006
[12,   256] loss: 0.004
[12,   384] loss: 0.003
Accuracy of the network on the 10000 test images: 66 %
[13,   128] loss: 0.004
[13,   256] loss: 0.006
[13,   384] loss: 0.006
Accuracy of the network on the 10000 test images: 65 %
[14,   128] loss: 0.010
[14,   256] loss: 0.012
[14,   384] loss: 0.014
Accuracy of the network on the 10000 test images: 66 %
[15,   128] loss: 0.011
[15,   256] loss: 0.010
[15,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 66 %
[16,   128] loss: 0.004
[16,   256] loss: 0.009
[16,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 66 %
[17,   128] loss: 0.006
[17,   256] loss: 0.005
[17,   384] loss: 0.016
Accuracy of the network on the 10000 test images: 65 %
[18,   128] loss: 0.012
[18,   256] loss: 0.007
[18,   384] loss: 0.010
Accuracy of the network on the 10000 test images: 66 %
[19,   128] loss: 0.008
[19,   256] loss: 0.004
[19,   384] loss: 0.006
Accuracy of the network on the 10000 test images: 66 %
[20,   128] loss: 0.004
[20,   256] loss: 0.006
[20,   384] loss: 0.004
Accuracy of the network on the 10000 test images: 66 %
[21,   128] loss: 0.004
[21,   256] loss: 0.004
[21,   384] loss: 0.006
Accuracy of the network on the 10000 test images: 66 %
[22,   128] loss: 0.015
[22,   256] loss: 0.014
[22,   384] loss: 0.013
Accuracy of the network on the 10000 test images: 66 %
[23,   128] loss: 0.009
[23,   256] loss: 0.010
[23,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 67 %
[24,   128] loss: 0.005
[24,   256] loss: 0.005
[24,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 66 %
[25,   128] loss: 0.005
[25,   256] loss: 0.005
[25,   384] loss: 0.007
Accuracy of the network on the 10000 test images: 66 %
[26,   128] loss: 0.004
[26,   256] loss: 0.004
[26,   384] loss: 0.010
Accuracy of the network on the 10000 test images: 65 %
[27,   128] loss: 0.010
[27,   256] loss: 0.017
[27,   384] loss: 0.015
Accuracy of the network on the 10000 test images: 66 %
[28,   128] loss: 0.007
[28,   256] loss: 0.008
[28,   384] loss: 0.008
Accuracy of the network on the 10000 test images: 66 %
[29,   128] loss: 0.005
[29,   256] loss: 0.011
[29,   384] loss: 0.010
Accuracy of the network on the 10000 test images: 66 %
[30,   128] loss: 0.013
[30,   256] loss: 0.013
[30,   384] loss: 0.008
Process Process-656:
Process Process-655:
Process Process-658:
Process Process-654:
Process Process-651:
Process Process-659:
Process Process-657:
---------
KeyboardInterruptTraceback (most recent call last)
<ipython-input-21-d0a40d6d4a02> in <module>()
     35     correct = 0
     36     total = 0
---> 37     for data in testloader:
     38         images, labels = data
     39         outputs = net(Variable(images).cuda())

~/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py in __next__(self)
    194         while True:
    195             assert (not self.shutdown and self.batches_outstanding > 0)
--> 196             idx, batch = self.data_queue.get()
    197             self.batches_outstanding -= 1
    198             if idx != self.rcvd_idx:

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py in get(self)
    342             res = self._reader.recv_bytes()
    343         # unserialize the data after having released the lock
--> 344         return _ForkingPickler.loads(res)
    345 
    346     def put(self, obj):

~/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/multiprocessing/reductions.py in rebuild_storage_fd(cls, df, size)
     68         fd = multiprocessing.reduction.rebuild_handle(df)
     69     else:
---> 70         fd = df.detach()
     71     try:
     72         storage = storage_from_cache(cls, fd_id(fd))

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/resource_sharer.py in detach(self)
     55         def detach(self):
     56             '''Get the fd.  This should only be called once.'''
---> 57             with _resource_sharer.get_connection(self._id) as conn:
     58                 return reduction.recv_handle(conn)
     59 

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/resource_sharer.py in get_connection(ident)
     85         from .connection import Client
     86         address, key = ident
---> 87         c = Client(address, authkey=process.current_process().authkey)
     88         c.send((key, os.getpid()))
     89         return c

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py in Client(address, family, authkey)
    491 
    492     if authkey is not None:
--> 493         answer_challenge(c, authkey)
    494         deliver_challenge(c, authkey)
    495 

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py in answer_challenge(connection, authkey)
    730     import hmac
    731     assert isinstance(authkey, bytes)
--> 732     message = connection.recv_bytes(256)         # reject large message
    733     assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message
    734     message = message[len(CHALLENGE):]

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py in recv_bytes(self, maxlength)
    214         if maxlength is not None and maxlength < 0:
    215             raise ValueError("negative maxlength")
--> 216         buf = self._recv_bytes(maxlength)
    217         if buf is None:
    218             self._bad_message_length()

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py in _recv_bytes(self, maxsize)
    405 
    406     def _recv_bytes(self, maxsize=None):
--> 407         buf = self._recv(4)
    408         size, = struct.unpack("!i", buf.getvalue())
    409         if maxsize is not None and size > maxsize:

~/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py in _recv(self, size, read)
    377         remaining = size
    378         while remaining > 0:
--> 379             chunk = read(handle, remaining)
    380             n = len(chunk)
    381             if n == 0:

KeyboardInterrupt: 
Traceback (most recent call last):
Traceback (most recent call last):
Process Process-652:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
Process Process-660:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
Traceback (most recent call last):
Process Process-653:
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 342, in get
    res = self._reader.recv_bytes()
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py", line 216, in recv_bytes
    buf = self._recv_bytes(maxlength)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py", line 407, in _recv_bytes
    buf = self._recv(4)
KeyboardInterrupt
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py", line 379, in _recv
    chunk = read(handle, remaining)
KeyboardInterrupt

In [18]:
########################################################################
# 5. Test the network on the test data
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# We have trained the network for 2 passes over the training dataset.
# But we need to check if the network has learnt anything at all.
#
# We will check this by predicting the class label that the neural network
# outputs, and checking it against the ground-truth. If the prediction is
# correct, we add the sample to the list of correct predictions.
#
# Okay, first step. Let us display an image from the test set to get familiar.

dataiter = iter(testloader)
images, labels = dataiter.next()

# print images
imshow(torchvision.utils.make_grid(images))
print('GroundTruth: ', ' '.join('%5s' % classes[labels[j]] for j in range(4)))


GroundTruth:    cat  ship  ship plane
Process Process-50:
Process Process-49:
Process Process-43:
Process Process-48:
Process Process-42:
Process Process-47:
Process Process-44:
Process Process-41:
Process Process-46:
Process Process-45:
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
Traceback (most recent call last):
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
Traceback (most recent call last):
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 249, in _bootstrap
    self.run()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/process.py", line 93, in run
    self._target(*self._args, **self._kwargs)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/site-packages/torch/utils/data/dataloader.py", line 34, in _worker_loop
    r = index_queue.get()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 341, in get
    with self._rlock:
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/queues.py", line 342, in get
    res = self._reader.recv_bytes()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
    return self._semlock.__enter__()
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py", line 216, in recv_bytes
    buf = self._recv_bytes(maxlength)
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
KeyboardInterrupt
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py", line 407, in _recv_bytes
    buf = self._recv(4)
  File "/home/isjeon/anaconda3/envs/lab_conda/lib/python3.6/multiprocessing/connection.py", line 379, in _recv
    chunk = read(handle, remaining)
KeyboardInterrupt

In [19]:
########################################################################
# Okay, now let us see what the neural network thinks these examples above are:

outputs = net(Variable(images).cuda())

########################################################################
# The outputs are energies for the 10 classes.
# Higher the energy for a class, the more the network
# thinks that the image is of the particular class.
# So, let's get the index of the highest energy:
_, predicted = torch.max(outputs.data.cpu(), 1)

print('Predicted: ', ' '.join('%5s' % classes[predicted[j]]
                              for j in range(4) ))


Predicted:    cat   car   cat  bird

In [20]:
########################################################################
# The results seem pretty good.
#
# Let us look at how the network performs on the whole dataset.

correct = 0
total = 0
for data in testloader:
    images, labels = data
    outputs = net(Variable(images).cuda())
    _, predicted = torch.max(outputs.data, 1)
    total += labels.size(0)
    correct += (predicted.cpu() == labels).sum()

print('Accuracy of the network on the 10000 test images: %d %%' % (
    100 * correct / total))


Accuracy of the network on the 10000 test images: 66 %

In [9]:
########################################################################
# That looks waaay better than chance, which is 10% accuracy (randomly picking
# a class out of 10 classes).
# Seems like the network learnt something.
#
# Hmmm, what are the classes that performed well, and the classes that did
# not perform well:

class_correct = list(0. for i in range(10))
class_total = list(0. for i in range(10))
for data in testloader:
    images, labels = data
    outputs = net(Variable(images.cuda()))
    _, predicted = torch.max(outputs.data, 1)
    c = (predicted.cpu() == labels).squeeze()
    for i in range(4):
        label = labels[i]
        class_correct[label] += c[i]
        class_total[label] += 1


for i in range(10):
    print('Accuracy of %5s : %2d %%' % (
        classes[i], 100 * class_correct[i] / class_total[i]))


Accuracy of plane : 65 %
Accuracy of   car : 75 %
Accuracy of  bird : 49 %
Accuracy of   cat : 43 %
Accuracy of  deer : 54 %
Accuracy of   dog : 48 %
Accuracy of  frog : 71 %
Accuracy of horse : 66 %
Accuracy of  ship : 75 %
Accuracy of truck : 71 %

In [ ]: