Otto Group Product Classification Challenge using nolearn/lasagne

This short notebook is meant to help you get started with nolearn and Lasagne by training a neural net and making a submission to the Otto Group Product Classification Challenge.

Imports


In [1]:
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler

In [2]:
from lasagne.layers import DenseLayer
from lasagne.layers import InputLayer
from lasagne.layers import DropoutLayer
from lasagne.nonlinearities import softmax
from lasagne.updates import nesterov_momentum
from nolearn.lasagne import NeuralNet

Utility functions


In [3]:
def load_train_data(path):
    df = pd.read_csv(path)
    X = df.values.copy()
    np.random.shuffle(X)  # shuffle rows so the train/validation split is random
    # drop the id column and split off the target labels
    X, labels = X[:, 1:-1].astype(np.float32), X[:, -1]
    encoder = LabelEncoder()
    y = encoder.fit_transform(labels).astype(np.int32)  # class names -> integer codes
    scaler = StandardScaler()
    X = scaler.fit_transform(X)  # zero mean, unit variance per feature
    return X, y, encoder, scaler
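
As a quick illustration of what these two transformers do, here is a toy example (made-up data, not part of the challenge):

from sklearn.preprocessing import LabelEncoder, StandardScaler
import numpy as np

labels = np.array(['Class_2', 'Class_1', 'Class_2'])
enc = LabelEncoder()
print(enc.fit_transform(labels))  # [1 0 1] -- classes get integer codes in sorted order
print(enc.classes_)               # ['Class_1' 'Class_2']

X_toy = np.array([[1., 10.], [3., 30.]])
print(StandardScaler().fit_transform(X_toy))  # each column rescaled to mean 0, std 1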

In [4]:
def load_test_data(path, scaler):
    df = pd.read_csv(path)
    X = df.values.copy()
    X, ids = X[:, 1:].astype(np.float32), X[:, 0].astype(str)  # split off the id column
    # reuse the scaler fitted on the training data so both sets share the same scaling
    X = scaler.transform(X)
    return X, ids

In [5]:
def make_submission(clf, X_test, ids, encoder, name='my_neural_net_submission.csv'):
    y_prob = clf.predict_proba(X_test)
    with open(name, 'w') as f:
        f.write('id,')
        f.write(','.join(encoder.classes_))
        f.write('\n')
        for id_, probs in zip(ids, y_prob):
            # join the row id and the class probabilities into one CSV line;
            # list(...) around map makes this work on Python 3 as well
            row = ','.join([id_] + list(map(str, probs.tolist())))
            f.write(row)
            f.write('\n')
    print("Wrote submission to file {}.".format(name))

Load Data


In [6]:
X, y, encoder, scaler = load_train_data('data/train.csv')

In [7]:
X_test, ids = load_test_data('data/test.csv', scaler)

In [8]:
num_classes = len(encoder.classes_)
num_features = X.shape[1]

Train Neural Net


In [9]:
layers0 = [('input', InputLayer),
           ('dense0', DenseLayer),
           ('dropout', DropoutLayer),
           ('dense1', DenseLayer),
           ('output', DenseLayer)]
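
nolearn builds the network from this list of (name, layer class) pairs. Each layer's parameters are then passed to NeuralNet with the layer name as prefix: dense0_num_units=200 below sets num_units of the 'dense0' layer, dropout_p=0.5 sets p of the 'dropout' layer, and so on.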

In [10]:
net0 = NeuralNet(layers=layers0,
                 # layer parameters:
                 input_shape=(None, num_features),
                 dense0_num_units=200,
                 dropout_p=0.5,
                 dense1_num_units=200,
                 output_num_units=num_classes,
                 output_nonlinearity=softmax,

                 # optimization method:
                 update=nesterov_momentum,
                 update_learning_rate=0.01,
                 update_momentum=0.9,

                 eval_size=0.2,  # hold out 20% of the training data for validation
                 verbose=1,
                 max_epochs=20)

In [11]:
net0.fit(X, y)


  input             	(None, 93L)         	produces      93 outputs
  dense0            	(None, 200)         	produces     200 outputs
  dropout           	(None, 200)         	produces     200 outputs
  dense1            	(None, 200)         	produces     200 outputs
  output            	(None, 9)           	produces       9 outputs

 Epoch  |  Train loss  |  Valid loss  |  Train / Val  |  Valid acc  |  Dur
--------|--------------|--------------|---------------|-------------|-------
     1  |    0.929630  |    0.674008  |     1.379258  |     75.32%  |  1.6s
     2  |    0.706279  |    0.627249  |     1.125994  |     76.74%  |  1.2s
     3  |    0.665223  |    0.602289  |     1.104491  |     76.96%  |  1.2s
     4  |    0.640945  |    0.587731  |     1.090540  |     77.73%  |  1.2s
     5  |    0.623396  |    0.577516  |     1.079444  |     77.79%  |  1.5s
     6  |    0.612419  |    0.568359  |     1.077520  |     77.85%  |  1.3s
     7  |    0.599983  |    0.566710  |     1.058712  |     78.34%  |  1.3s
     8  |    0.592689  |    0.559909  |     1.058544  |     78.17%  |  1.2s
     9  |    0.581145  |    0.557094  |     1.043171  |     78.57%  |  1.6s
    10  |    0.578483  |    0.553404  |     1.045318  |     78.39%  |  1.8s
    11  |    0.568782  |    0.550518  |     1.033176  |     78.74%  |  1.8s
    12  |    0.563738  |    0.546480  |     1.031582  |     78.88%  |  1.8s
    13  |    0.559736  |    0.542942  |     1.030933  |     78.92%  |  1.9s
    14  |    0.554280  |    0.540654  |     1.025202  |     79.07%  |  1.8s
    15  |    0.553027  |    0.536958  |     1.029927  |     79.03%  |  1.8s
    16  |    0.548043  |    0.537584  |     1.019456  |     79.13%  |  1.9s
    17  |    0.547400  |    0.534357  |     1.024408  |     79.39%  |  1.8s
    18  |    0.544515  |    0.532123  |     1.023289  |     79.27%  |  1.9s
    19  |    0.537075  |    0.531905  |     1.009719  |     79.42%  |  1.8s
    20  |    0.536718  |    0.531486  |     1.009845  |     79.16%  |  2.2s
Out[11]:
NeuralNet(X_tensor_type=<function matrix at 0x0000000013F15518>,
     batch_iterator_test=<nolearn.lasagne.BatchIterator object at 0x000000001492D470>,
     batch_iterator_train=<nolearn.lasagne.BatchIterator object at 0x00000000147E3AC8>,
     dense0_num_units=200, dense1_num_units=200, dropout_p=0.5,
     eval_size=0.2, input_shape=(None, 93L),
     layers=[('input', <class 'lasagne.layers.input.InputLayer'>), ('dense0', <class 'lasagne.layers.dense.DenseLayer'>), ('dropout', <class 'lasagne.layers.noise.DropoutLayer'>), ('dense1', <class 'lasagne.layers.dense.DenseLayer'>), ('output', <class 'lasagne.layers.dense.DenseLayer'>)],
     loss=None, max_epochs=20, more_params={},
     objective=<nolearn.lasagne.LossObjective object at 0x000000001492DDD8>,
     on_epoch_finished=(), on_training_finished=(),
     output_nonlinearity=<theano.tensor.nnet.nnet.Softmax object at 0x00000000141FB828>,
     output_num_units=9, regression=False,
     update=<function nesterov_momentum at 0x00000000147A8E48>,
     update_learning_rate=0.01, update_momentum=0.9,
     use_label_encoder=False, verbose=1,
     y_tensor_type=TensorType(int32, vector))
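
The Train / Val column is the ratio of training to validation loss; values this close to 1.0 suggest the net is not badly overfitting. Before writing the submission file, a quick sanity check of the predicted probabilities can't hurt (a sketch, assuming the net0 and X_test objects from above):

probs = net0.predict_proba(X_test)
print(probs.shape)     # (number of test rows, 9): one column per class
print(probs[0].sum())  # ~1.0, since the output layer is a softmax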

Prepare Submission File


In [12]:
make_submission(net0, X_test, ids, encoder)


Wrote submission to file my_neural_net_submission.csv.