In [32]:
# Import modules
import os
import pandas as pd
import numpy as np
from skimage.io import imread
from lasagne import layers
from lasagne.nonlinearities import softmax
from nolearn.lasagne import NeuralNet, BatchIterator

# Set path: expanduser resolves '~' directly inside the relative path,
# producing the same '<home>/workspace/julia' string in one call.
path = os.path.expanduser('~/workspace/julia')

In [28]:
# Define functions
def read_data(typeData, labelsInfo, imageSize, path, resizedDim=32):
    """Load pre-resized BMP images as flattened grayscale rows.

    Parameters
    ----------
    typeData : str
        Dataset split name ('train' or 'test'); selects the image folder.
    labelsInfo : pandas.DataFrame
        Must contain an 'ID' column with the image identifiers to load.
    imageSize : int
        Number of pixels per flattened image; should equal
        resizedDim ** 2 (e.g. 1024 for 32 x 32).
    path : str
        Root directory containing the '<typeData>Resized<resizedDim>'
        image folders.
    resizedDim : int, optional
        Side length of the pre-resized square images.  Defaults to 32,
        matching the previously hard-coded 'Resized32' folder name, so
        existing callers are unaffected.

    Returns
    -------
    numpy.ndarray
        Array of shape (n_images, imageSize), one flattened image per row.
    """
    x = np.zeros((labelsInfo.shape[0], imageSize))

    for (index, idImage) in enumerate(labelsInfo['ID']):
        # Images were pre-resized to resizedDim x resizedDim squares.
        nameFile = '{0}/{1}Resized{2}/{3}.Bmp'.format(path,
                    typeData, resizedDim, idImage)
        # NOTE(review): `as_grey` was renamed to `as_gray` in newer
        # scikit-image releases; kept as-is for the version in use here.
        img = imread(nameFile, as_grey = True)

        # Flatten the 2-D image into one row of the design matrix.
        x[index, :] = np.reshape(img, (1, imageSize))

    return x

def fit_model(reshaped_train_x, y, image_width, 
                    image_height, reshaped_test_x):
    """Train a 3-conv-layer CNN on the training data and predict the test set.

    Parameters
    ----------
    reshaped_train_x : numpy.ndarray
        Training images, shape (n, 1, image_height, image_width), float32.
    y : numpy.ndarray
        Integer class labels (encoded internally via nolearn's label encoder).
    image_width, image_height : int
        Spatial dimensions of the input images.
    reshaped_test_x : numpy.ndarray
        Test images with the same per-image shape as the training data.

    Returns
    -------
    numpy.ndarray
        Predicted class labels for reshaped_test_x, decoded back to the
        original label space by nolearn's label encoder.
    """
    net = NeuralNet(
        layers = [
            ('input', layers.InputLayer),
            ('conv1', layers.Conv2DLayer),
            ('pool1', layers.MaxPool2DLayer),
            ('dropout1', layers.DropoutLayer),
            ('conv2', layers.Conv2DLayer),
            ('pool2', layers.MaxPool2DLayer),
            ('dropout2', layers.DropoutLayer),
            ('conv3', layers.Conv2DLayer),
            ('hidden4', layers.DenseLayer),
            ('output', layers.DenseLayer),
        ],
        # Bug fix: image_height/image_width were accepted as parameters but
        # ignored — the shape was hard-coded to (None, 1, 32, 32).  Using the
        # parameters keeps behavior identical for the 32x32 callers in this
        # notebook while making the function work for other square sizes.
        input_shape = (None, 1, image_height, image_width),
        conv1_num_filters=32, conv1_filter_size=(5, 5), 
        pool1_pool_size=(2, 2),
        dropout1_p=0.2,
        conv2_num_filters=64, conv2_filter_size=(5, 5), 
        pool2_pool_size=(2, 2),
        dropout2_p=0.2,
        conv3_num_filters = 128, conv3_filter_size = (5, 5),
        hidden4_num_units=500,
        # 62 classes: digits 0-9, A-Z, a-z for the Julia character dataset.
        output_num_units = 62, output_nonlinearity = softmax,
        
        # Plain SGD with Nesterov-style momentum (nolearn default update).
        update_learning_rate = 0.01,
        update_momentum = 0.9,
        
        batch_iterator_train = BatchIterator(batch_size = 100),
        batch_iterator_test = BatchIterator(batch_size = 100),
        
        # Labels are raw ord() codes; let nolearn map them to 0..61 and back.
        use_label_encoder = True,
        regression = False,
        max_epochs = 100,
        verbose = 1,
    )
    
    net.fit(reshaped_train_x, y)
    prediction = net.predict(reshaped_test_x)
    
    return prediction

In [11]:
# Set variables
imageSize = 1024 # 32 x 32 pixels per image
image_width = image_height = int(imageSize ** 0.5)

# Label files: training labels, and the sample submission (which lists
# the test-set IDs in the order the competition expects).
labelsInfoTrain = pd.read_csv\
            ('{0}/trainLabels.csv'.format(path))
labelsInfoTest = pd.read_csv\
            ('{0}/sampleSubmission.csv'.format(path))

# Load dataset
xTrain = read_data('train', labelsInfoTrain, imageSize, path)
xTest = read_data('test', labelsInfoTest, imageSize, path)

# Encode the single-character class labels as integer code points.
# Bug fix: a list comprehension replaces map() so this also works on
# Python 3, where map() returns a lazy iterator and np.array(map(...))
# silently produces a useless 0-d object array.
yTrain = np.array([ord(label) for label in labelsInfoTrain['Class']])

In [12]:
# Preprocess (Normalize) data
# NOTE(review): the conventional order is to subtract the mean first,
# then divide by the std; here the data is scaled before centering, so
# the result is zero-mean but not exactly unit-variance — confirm this
# is intended before changing it, since the model was tuned on it.
xTrain /= xTrain.std(axis = None)
xTrain -= xTrain.mean()

# NOTE(review): the test set is normalized with its own statistics
# rather than the training set's, making train/test features slightly
# inconsistent — verify this is deliberate.
xTest /= xTest.std(axis = None)
xTest -= xTest.mean()

In [17]:
# Reshape data into the 4-D layout nolearn/lasagne expects:
# (n_samples, n_channels, height, width).  Casting to float32 before
# reshaping yields the same array as reshaping first and casting after.
train_x_reshaped = xTrain.astype('float32').reshape(
    xTrain.shape[0], 1, image_height, image_width)
test_x_reshaped = xTest.astype('float32').reshape(
    xTest.shape[0], 1, image_height, image_width)

In [29]:
# Train and test data
# Fit the CNN on the full training set and predict character classes
# for the test set; `predict` holds the decoded labels used below for
# the submission file.
predict = fit_model(train_x_reshaped, yTrain, image_width, 
                            image_height, test_x_reshaped)


# Neural Network with 352586 learnable parameters

## Layer information

  #  name      size
---  --------  --------
  0  input     1x32x32
  1  conv1     32x28x28
  2  pool1     32x14x14
  3  dropout1  32x14x14
  4  conv2     64x10x10
  5  pool2     64x5x5
  6  dropout2  64x5x5
  7  conv3     128x1x1
  8  hidden4   500
  9  output    62

  epoch    train loss    valid loss    train/val    valid acc  dur
-------  ------------  ------------  -----------  -----------  -----
      1       4.07326       4.00454      1.01716      0.07154  5.59s
      2       3.84679       3.82432      1.00588      0.04863  5.27s
      3       3.77750       3.81225      0.99089      0.04863  5.26s
      4       3.76895       3.80286      0.99108      0.04863  5.24s
      5       3.76626       3.79825      0.99158      0.07154  5.25s
      6       3.76250       3.79669      0.99099      0.07154  5.25s
      7       3.75885       3.79668      0.99004      0.07154  5.26s
      8       3.75819       3.79587      0.99007      0.07154  5.39s
      9       3.75566       3.79618      0.98933      0.07154  5.49s
     10       3.75521       3.79599      0.98926      0.07154  5.50s
     11       3.75258       3.79575      0.98863      0.07154  5.41s
     12       3.75223       3.79508      0.98871      0.07154  5.60s
     13       3.75009       3.79502      0.98816      0.07154  5.34s
     14       3.74724       3.79510      0.98739      0.07154  5.27s
     15       3.74683       3.79414      0.98753      0.07154  5.26s
     16       3.74490       3.79290      0.98735      0.07154  5.26s
     17       3.74061       3.79361      0.98603      0.07077  5.33s
     18       3.73745       3.79995      0.98355      0.07154  5.48s
     19       3.73247       3.80932      0.97982      0.07308  5.25s
     20       3.76033       3.79326      0.99132      0.07154  5.25s
     21       3.74361       3.79066      0.98759      0.07154  5.25s
     22       3.73327       3.78847      0.98543      0.08077  5.25s
     23       3.72529       3.78147      0.98514      0.08231  5.26s
     24       3.70952       3.76842      0.98437      0.08077  5.26s
     25       3.67660       3.75301      0.97964      0.05000  5.26s
     26       3.61354       3.70779      0.97458      0.06077  6.02s
     27       3.46199       3.52721      0.98151      0.16623  5.33s
     28       3.20525       3.17113      1.01076      0.23871  6.01s
     29       2.83059       2.73610      1.03454      0.33306  5.25s
     30       2.43685       2.37554      1.02581      0.37852  5.25s
     31       2.13025       2.13476      0.99789      0.43792  5.25s
     32       1.90612       1.94077      0.98215      0.48953  5.25s
     33       1.71778       1.82689      0.94028      0.50525  5.25s
     34       1.57491       1.72397      0.91354      0.53756  5.25s
     35       1.78876       1.95674      0.91415      0.51987  5.25s
     36       3.09262       3.02139      1.02357      0.25964  5.73s
     37       2.13303       2.02259      1.05460      0.48321  5.25s
     38       1.79429       1.82290      0.98431      0.51278  5.25s
     39       1.59881       1.69493      0.94329      0.54977  5.25s
     40       1.46552       1.65756      0.88415      0.55131  5.25s
     41       1.34792       1.56996      0.85857      0.57182  5.25s
     42       1.27254       1.53409      0.82951      0.58985  5.66s
     43       1.19391       1.52784      0.78144      0.58353  5.25s
     44       1.16085       1.50833      0.76963      0.58857  5.25s
     45       1.09038       1.47203      0.74073      0.60122  5.25s
     46       1.03941       1.48078      0.70194      0.60139  5.25s
     47       1.00219       1.46537      0.68392      0.60216  5.25s
     48       0.94361       1.46669      0.64336      0.61326  5.25s
     49       0.91288       1.46016      0.62520      0.60814  5.25s
     50       0.88041       1.41993      0.62004      0.61976  5.25s
     51       0.85397       1.44071      0.59274      0.62386  5.35s
     52       0.80320       1.43134      0.56115      0.62771  5.26s
     53       0.76381       1.46737      0.52053      0.62069  5.25s
     54       0.75521       1.47131      0.51329      0.61966  5.25s
     55       0.72066       1.49156      0.48316      0.62420  5.25s
     56       0.68973       1.51686      0.45471      0.62180  5.25s
     57       0.68172       1.46853      0.46422      0.63882  5.25s
     58       0.63758       1.51531      0.42076      0.62779  5.25s
     59       0.62820       1.52620      0.41161      0.63846  5.26s
     60       0.60932       1.51809      0.40137      0.63163  5.25s
     61       0.56907       1.53922      0.36971      0.64940  5.25s
     62       0.57029       1.55967      0.36565      0.64462  5.25s
     63       0.53986       1.57159      0.34351      0.64103  5.25s
     64       0.52968       1.61243      0.32850      0.63514  5.25s
     65       0.48806       1.58519      0.30789      0.64291  5.77s
     66       0.48550       1.64693      0.29479      0.65240  5.48s
     67       0.46792       1.65604      0.28255      0.64555  5.25s
     68       0.44722       1.68455      0.26548      0.64615  5.25s
     69       0.43531       1.74900      0.24889      0.64000  5.37s
     70       0.43284       1.73617      0.24931      0.63839  5.38s
     71       0.42692       1.73128      0.24659      0.64906  5.25s
     72       0.39049       1.70835      0.22858      0.65409  5.25s
     73       0.38798       1.73288      0.22389      0.65777  5.25s
     74       0.36650       1.75874      0.20839      0.65248  5.25s
     75       0.36364       1.75397      0.20732      0.65257  5.25s
     76       0.34440       1.80747      0.19054      0.64512  5.25s
     77       0.34918       1.87867      0.18586      0.63632  5.25s
     78       0.32694       1.84668      0.17704      0.64940  5.25s
     79       0.32571       1.87398      0.17381      0.64563  5.25s
     80       0.31559       1.83828      0.17168      0.65392  5.25s
     81       0.28521       1.87713      0.15194      0.65914  5.25s
     82       0.27954       1.90219      0.14696      0.65777  5.26s
     83       0.28673       1.88709      0.15195      0.66084  5.35s
     84       0.26510       1.95600      0.13553      0.64428  5.38s
     85       0.26781       2.04164      0.13117      0.64642  5.25s
     86       0.26873       1.96365      0.13685      0.64871  5.25s
     87       0.26025       2.02610      0.12845      0.63719  5.55s
     88       0.26616       1.99416      0.13347      0.64400  5.44s
     89       0.24855       2.04746      0.12139      0.65897  5.25s
     90       0.22836       1.96777      0.11605      0.66068  5.25s
     91       0.22533       2.06428      0.10916      0.65349  5.35s
     92       0.21076       2.03331      0.10365      0.66700  5.61s
     93       0.19389       2.10222      0.09223      0.65750  5.48s
     94       0.20598       2.05408      0.10028      0.65418  5.58s
     95       0.20664       2.12356      0.09731      0.65820  5.45s
     96       0.20886       2.04822      0.10197      0.65435  5.38s
     97       0.20206       2.09545      0.09643      0.65495  5.68s
     98       0.19895       2.11221      0.09419      0.64923  5.64s
     99       0.18409       2.11339      0.08711      0.66486  5.71s
    100       0.18734       2.10825      0.08886      0.67084  5.34s

In [30]:
# Convert for submission
# Decode integer predictions back to single characters.  Bug fix: a list
# comprehension replaces map() so the column assignment also works on
# Python 3, where map() returns a lazy iterator that pandas will not
# expand into a column.  int() makes chr() safe for numpy integer types.
yTest = [chr(int(code)) for code in predict]
labelsInfoTest['Class'] = yTest
labelsInfoTest.to_csv('{0}/subzero.csv'.format(path), index = False)

This submission scores 0.66492 on the leaderboard (results may vary between runs, since training is not seeded).