In [1]:
%matplotlib inline
from matplotlib import pyplot as plt
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import train_test_split
from sklearn.externals import joblib
from sklearn.metrics import accuracy_score
from sknn.mlp import Classifier, Layer
from sknn import ae, mlp
import numpy as np
import pandas as pd
from time import time

In [2]:
pd.set_option('display.precision', 4)

In [3]:
features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")

features = np.array(features, 'int16')
labels = np.array(labels, 'int')

t0 = time()
def scale(X, eps = 0.001):
    # scale the data points so that the columns of the feature space
    # (i.e. the predictors) fall roughly within the range [0, 1];
    # eps avoids division by zero for all-zero columns
    return (X - np.min(X, axis = 0)) / (np.max(X, axis = 0) + eps)

features = features.astype("float32")
features = scale(features)

print "escape time : ", round(time()-t0, 3), "s"

# scale the data to the range [0, 1] and then construct the training
# and testing splits
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size = 0.1)

print "the shape of training set %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "the shape of test set %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "the range of training set : %s ~ %s" %(trainX.min(),trainX.max())
print "the range of test set : %s ~ %s" %(testX.min(),testX.max())


elapsed time :  0.091 s
shape of training set : 5400 rows, 784 columns
shape of test set : 600 rows, 784 columns
range of training set : 0.0 ~ 0.999996
range of test set : 0.0 ~ 0.999996
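
Note that scale() divides by the per-column maximum plus eps rather than by the column range, so columns whose minimum is non-zero would be compressed into a band narrower than [0, 1]; for these pixel features the column minima are 0, so the result matches ordinary min-max scaling. A minimal alternative sketch, assuming scikit-learn's MinMaxScaler (not used in this notebook):

# Hedged alternative (not the run above): MinMaxScaler rescales each column
# by its own min/max, fitted on the training split only.
from sklearn.preprocessing import MinMaxScaler

scaler = MinMaxScaler(feature_range=(0, 1))
alt_trainX = scaler.fit_transform(trainX)   # learn per-column min/max on training data
alt_testX = scaler.transform(testX)         # reuse the training statistics
print "alternative range : %s ~ %s" % (alt_trainX.min(), alt_trainX.max())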

In [13]:
score = []

for n in range(1, 20):
    # Initialize the auto-encoder for unsupervised learning.
    myae = ae.AutoEncoder(
                layers=[
                    ae.Layer("Tanh", units=128),
                    ae.Layer("Sigmoid", units=n)],
                learning_rate=0.002,
                n_iter=2)

    t0 = time()
    # Layerwise pre-training using only the input data.
    myae.fit(trainX)

    # Encode the training and test data with the trained auto-encoder.
    ae_trainX = myae.transform(trainX)
    ae_testX = myae.transform(testX)

    # Initialize a small multi-layer perceptron on the encoded features.
    mymlp = mlp.Regressor(
                layers=[
                    mlp.Layer("Rectifier", units=6),
                    mlp.Layer("Softmax")],
                verbose=1,
                learning_rate=0.009,
                n_iter=2)

    # Now perform supervised learning as usual.
    mymlp.fit(ae_trainX, trainY)
    print "number of units : %s" % n
    print "elapsed time : ", round(time()-t0, 3), "s"

    preds = mymlp.predict(ae_testX)
    acc = accuracy_score(testY, preds)
    print "accuracy score : %s" % acc
    score.append(acc)


number of units : 1
elapsed time :  30.056 s
accuracy score : 0.165
number of units : 2
elapsed time :  31.665 s
accuracy score : 0.165
number of units : 3
elapsed time :  29.997 s
accuracy score : 0.165
number of units : 4
elapsed time :  28.718 s
accuracy score : 0.165
number of units : 5
elapsed time :  30.268 s
accuracy score : 0.165
number of units : 6
elapsed time :  29.941 s
accuracy score : 0.165
number of units : 7
elapsed time :  29.23 s
accuracy score : 0.165
number of units : 8
elapsed time :  29.639 s
accuracy score : 0.165
number of units : 9
elapsed time :  29.966 s
accuracy score : 0.165
number of units : 10
elapsed time :  29.867 s
accuracy score : 0.165
number of units : 11
elapsed time :  29.998 s
accuracy score : 0.165
number of units : 12
elapsed time :  30.624 s
accuracy score : 0.165
number of units : 13
elapsed time :  29.963 s
accuracy score : 0.165
number of units : 14
elapsed time :  30.428 s
accuracy score : 0.165
number of units : 15
elapsed time :  28.942 s
accuracy score : 0.165
number of units : 16
elapsed time :  30.429 s
accuracy score : 0.165
number of units : 17
elapsed time :  30.9 s
accuracy score : 0.165
number of units : 18
elapsed time :  29.028 s
accuracy score : 0.165
number of units : 19
elapsed time :  30.602 s
accuracy score : 0.165
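
The accuracy stays pinned at 0.165 for every bottleneck size, which suggests the supervised stage is not learning a useful mapping from the encoded features to the labels; a two-iteration Regressor with a Softmax output treats the integer labels as regression targets. A hedged variant, not the run recorded above, using sknn's mlp.Classifier (imported at the top but unused here), with hypothetical hidden size, learning rate, and iteration count:

# Hedged sketch: train a Classifier on the encoded features instead of a
# Regressor; predictions are class labels, so accuracy_score applies directly.
myclf = mlp.Classifier(
            layers=[
                mlp.Layer("Rectifier", units=64),
                mlp.Layer("Softmax")],
            learning_rate=0.01,
            n_iter=10)
myclf.fit(ae_trainX, trainY)
clf_preds = myclf.predict(ae_testX)
print "classifier accuracy : %s" % accuracy_score(testY, clf_preds)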

In [14]:
df = pd.DataFrame({'Autoencoder' : score}, index=range(1, 20))

In [17]:
df.head(5)


Out[17]:
Autoencoder
1 0.165
2 0.165
3 0.165
4 0.165
5 0.165

In [18]:
df.describe()


Out[18]:
Autoencoder
count 19.000
mean 0.165
std 0.000
min 0.165
25% 0.165
50% 0.165
75% 0.165
max 0.165

In [19]:
df.plot(title='Autoencoder', figsize=(8,6))
plt.xlabel('units')
plt.ylabel('accuracy')


Out[19]:
<matplotlib.text.Text at 0x115761f50>

In [47]:
# assumes a DataFrame whose columns hold the scores for different output
# activation functions (built in runs not shown in this section)
df[['Linear', 'Softmax', 'Gaussian']].plot(title='nonlinear activation function', figsize=(8,6))
plt.xlabel('units')
plt.ylabel('accuracy')


Out[47]:
<matplotlib.text.Text at 0x10c556f10>

In [5]:
n = 10

# Initialize the auto-encoder for unsupervised learning.
myae = ae.AutoEncoder(
            layers=[
                ae.Layer("Tanh", units=784),
                ae.Layer("Tanh", units=n),
                ae.Layer("Tanh", units=784)],
            learning_rate=0.002,
            n_iter=10)

t0 = time()
# Layerwise pre-training using only the input data.
myae.fit(trainX)

# Encode the training and test data with the trained auto-encoder.
ae_trainX = myae.transform(trainX)
ae_testX = myae.transform(testX)

# Initialize a small multi-layer perceptron on the encoded features.
mymlp = mlp.Regressor(
            layers=[
                mlp.Layer("Rectifier", units=n),
                mlp.Layer("Softmax")],
            verbose=1,
            learning_rate=0.009,
            n_iter=2)

# Now perform supervised learning as usual.
mymlp.fit(ae_trainX, trainY)
print "number of units : %s" % n
print "elapsed time : ", round(time()-t0, 3), "s"

preds = mymlp.predict(ae_testX)
print "accuracy score : %s" % accuracy_score(testY, preds)


number of units : 10
elapsed time :  510.894 s
accuracy score : 0.15

In [6]:
# Initialize auto-encoder for unsupervised learning.
myae = ae.AutoEncoder(
            layers=[
                ae.Layer("Tanh", units=500),
                ae.Layer("Tanh", units=250),
                ae.Layer("Tanh", units=30),
                ae.Layer("Tanh", units=250),
                ae.Layer("Tanh", units=500),
                ae.Layer("Tanh", units=784)],
            learning_rate=0.002,
            n_iter=50)

# Layerwise pre-training using only the input data.
myae.fit(trainX)

# Encode the training and test data with the trained auto-encoder.
ae_trainX = myae.transform(trainX)
ae_testX = myae.transform(testX)

In [13]:
# Initialize a multi-layer perceptron on the encoded features.
mymlp = mlp.Regressor(
            layers=[
                mlp.Layer("Tanh", units=100),
                mlp.Layer("Softmax")],
            verbose=1,
            learning_rate=0.09,
            n_iter=10)

# Now perform supervised learning as usual.
t0 = time()
mymlp.fit(ae_trainX, trainY)
print "number of units : %s" % n
print "elapsed time : ", round(time()-t0, 3), "s"

preds = mymlp.predict(ae_testX)
print "accuracy score : %s" % accuracy_score(testY, preds)


number of units : 10
elapsed time :  24.966 s
accuracy score : 0.15
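
Even with the deeper stacked auto-encoder the accuracy stays around 0.15, so it is worth comparing against a model fitted on the raw scaled pixels with no unsupervised pre-training. A minimal baseline sketch, not a run from this notebook, reusing the Classifier variant sketched after the unit sweep (hyperparameters are hypothetical):

# Hedged baseline: a small network fitted directly on the scaled
# 784-dimensional pixel features, skipping the auto-encoder entirely.
baseline = mlp.Classifier(
            layers=[
                mlp.Layer("Tanh", units=100),
                mlp.Layer("Softmax")],
            learning_rate=0.01,
            n_iter=10)
baseline.fit(trainX, trainY)
base_preds = baseline.predict(testX)
print "baseline accuracy : %s" % accuracy_score(testY, base_preds)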

In [9]:
ae_trainX[0]


Out[9]:
array([  7.90052571e-01,  -3.31797391e-01,  -1.55838878e-01,
        -3.36295232e-01,   4.18136630e-02,   2.40917462e-01,
         1.25489864e-01,  -9.11007702e-02,  -1.87554847e-01,
        -1.60902788e-01,  -2.87806166e-01,  -2.13745782e-01,
        -5.61345595e-03,   3.43152171e-01,  -7.70333064e-01,
         3.75467692e-02,   8.95112264e-02,   3.32179816e-01,
         6.50874569e-02,  -2.59841885e-01,  -2.89816417e-01,
        -5.54409668e-01,  -1.83166972e-01,   7.15095256e-03,
        -2.15607329e-01,   2.64099947e-01,  -7.14863681e-02,
        -2.64623659e-01,   1.01111091e-03,  -1.76622756e-01,
         1.63937484e-01,   1.04456041e-01,  -3.98662212e-02,
         8.66279921e-02,   4.13408422e-01,   4.43851335e-01,
         1.18752174e-01,  -1.06595532e-01,   2.45063941e-01,
         5.05783539e-01,   2.18177542e-01,   1.75950035e-01,
         2.58544434e-01,  -3.16358324e-01,   1.93841964e-01,
         2.65631379e-01,   2.55274587e-01,  -2.19056364e-01,
        -2.14430253e-01,  -3.00366245e-01,  -2.96204831e-02,
         2.67273558e-01,   6.92268166e-02,   8.26485624e-02,
         7.75743936e-02,   2.54387463e-01,  -5.87915481e-02,
         4.52599501e-01,  -2.20078780e-01,  -5.73155381e-01,
        -4.59304520e-01,  -2.12729397e-01,   4.59082318e-01,
         2.01075874e-01,  -2.89284904e-02,  -2.32627053e-01,
         6.26171209e-01,   7.26029022e-03,  -1.95172349e-01,
        -1.59436079e-01,   1.06965041e-01,  -9.27010966e-02,
        -3.66542646e-01,   2.19608578e-01,  -1.53438605e-01,
        -5.98330538e-02,   2.74006112e-01,  -3.22730509e-01,
        -1.99321447e-01,  -6.02950495e-02,   4.10055194e-03,
         8.47900225e-02,   1.47922368e-01,   1.96215280e-01,
        -1.86188333e-01,   1.23014106e-01,  -3.12154398e-01,
        -1.69797495e-01,  -3.48695327e-01,   4.49322504e-02,
        -1.18220174e-01,   1.20673980e-01,   9.64491280e-02,
        -4.07277477e-02,   8.66722114e-02,   4.86348491e-01,
         1.00455057e-01,  -1.93235244e-01,  -2.88749001e-01,
         2.06887038e-01,   1.37452670e-01,   2.14732476e-01,
        -1.17096680e-01,  -7.40642794e-01,  -1.52556702e-02,
         2.92332307e-01,   1.50709295e-01,  -6.49602021e-02,
        -2.06690006e-01,   1.95823201e-01,  -1.32576623e-01,
        -7.07789732e-02,  -2.33536639e-02,   1.39665768e-01,
        -8.39363533e-02,  -1.07177225e-01,   4.24374744e-02,
        -2.33819515e-01,   1.07661508e-01,   3.67471188e-02,
        -1.13851102e-01,  -6.83322871e-02,  -1.31366915e-01,
         9.76497717e-02,   8.95064993e-02,   1.36962383e-01,
         1.07922073e-01,  -8.04541085e-01,   1.27865300e-01,
        -3.82134711e-01,  -2.00068474e-01,   5.27815362e-02,
        -2.43712749e-02,  -2.87687092e-01,   1.64207013e-02,
        -6.91351090e-02,  -3.00368576e-01,   1.96688886e-01,
        -1.35138967e-02,   1.51369976e-01,   2.82934902e-01,
        -3.74182011e-01,  -7.22715249e-02,  -1.62651569e-01,
         2.70727422e-01,  -5.14208517e-01,   1.94728151e-01,
         1.92531046e-02,   3.34863948e-01,   2.51515437e-01,
        -6.43446506e-01,  -2.87753780e-02,   4.44371165e-01,
        -1.15528096e-01,   1.51868480e-01,   3.40909383e-02,
        -4.32505630e-01,   5.34322287e-01,   1.42358135e-02,
         1.90633928e-01,  -1.38758772e-01,  -9.84558529e-02,
         1.99822192e-01,   1.05071747e-04,  -2.24331624e-01,
        -1.00333184e-02,   7.73955343e-02,  -2.10738163e-01,
         1.45537388e-02,  -5.65201826e-02,  -5.21353629e-02,
        -3.39634864e-01,   4.98820734e-02,  -5.16878092e-01,
         8.26590660e-01,   3.18786590e-01,   1.88661323e-02,
         5.02015778e-02,   3.79953509e-01,   2.46038018e-01,
        -6.02225882e-01,   3.99451527e-02,  -6.10097940e-02,
        -7.64844909e-02,   5.79220671e-01,   7.30527021e-02,
         1.39907807e-02,   2.57189370e-01,   5.41623004e-04,
         2.36284769e-01,  -2.88515580e-01,  -5.98695346e-02,
         7.58848241e-02,  -5.24692375e-01,  -2.86092913e-01,
        -6.61331966e-01,   8.23332313e-01,  -7.14888957e-02,
         2.98697338e-01,   3.02531604e-01,   3.63311854e-01,
        -2.99737421e-02,   7.70676717e-02,   4.37177694e-02,
        -5.49440484e-02,  -3.19475859e-01,  -1.01311705e-01,
        -4.35788803e-01,   1.74108453e-01,  -4.65014247e-01,
         1.25763714e-01,  -3.64907479e-01,   2.74270920e-03,
         1.44042996e-01,   1.10630025e-01,  -3.29191797e-01,
         2.46008348e-01,  -9.85608884e-02,   8.10612533e-02,
        -7.29078507e-02,  -2.99344197e-01,  -1.15069632e-02,
         1.39237863e-01,   2.59463129e-01,  -5.40427622e-02,
         1.28535021e-01,   1.27062077e-01,   1.68329177e-01,
        -8.59624615e-02,   8.28333935e-02,  -2.30160952e-01,
         1.43696370e-01,  -1.13275177e-01,  -2.10357876e-01,
        -7.29905100e-03,   7.15363498e-02,  -1.92673187e-01,
         3.04424636e-01,   1.26612749e-01,   2.37317968e-01,
         3.47106739e-01,  -5.16648508e-01,   4.06723561e-02,
        -6.41744814e-01,  -1.03018494e-02,   5.15867009e-01,
        -1.32752273e-01,   1.64450483e-01,   5.69540552e-01,
        -5.39864713e-02,  -4.49082731e-01,   2.94739256e-01,
        -2.94543545e-01,   2.95315987e-02,  -4.04395466e-01,
         4.09178498e-02,   3.00152717e-02,  -4.50070064e-01,
        -5.42477292e-02,   5.30288630e-01,   2.60205344e-01,
         5.40364205e-01,   3.68393067e-01,   2.99784227e-01,
         1.67776279e-01,  -1.84041012e-01,  -1.55076865e-01,
         2.34630767e-01,  -3.09691167e-01,  -1.22693210e-01,
         1.90374262e-01,   7.44164277e-01,   6.51414940e-02,
        -1.18719426e-01,   2.09804855e-01,   8.06536581e-02,
         8.08157975e-01,   9.72071925e-02,   1.63445606e-01,
         3.61304318e-01,  -4.99770198e-03,  -2.42241375e-02,
        -3.02646804e-01,   1.83288672e-01,   9.85324382e-02,
         8.16802973e-02,   6.53047855e-01,  -6.64141337e-01,
         6.80910915e-02,   2.15165681e-01,  -5.44640624e-01,
        -1.24255531e-01,   1.77597487e-01,  -2.35468849e-01,
         1.16106629e-03,  -6.39552409e-02,   1.77380589e-01,
         1.41182217e-01,   3.83117024e-02,   6.26462161e-02,
        -1.85514920e-01,  -1.84931525e-01,  -5.45093434e-02,
        -8.42380193e-03,  -7.29352362e-01,   1.93456412e-01,
        -2.95585570e-01,   1.96973657e-01,   6.32540667e-03,
         2.76147509e-03,   5.35788717e-02,  -7.74299865e-02,
        -2.41075608e-01,  -4.92957510e-01,  -3.56367326e-01,
         5.13388707e-01,  -1.83035277e-02,   6.15745468e-05,
        -2.63304920e-01,   5.77838501e-02,   1.91600320e-01,
        -3.77721627e-02,  -1.05100776e-02,  -8.99265702e-01,
        -2.69322496e-01,  -1.24687251e-01,  -2.26804234e-01,
        -1.44120547e-01,   1.26320842e-01,  -1.19451549e-01,
        -4.98541644e-01,  -2.34996064e-01,  -1.57300400e-01,
         8.44268676e-01,  -7.53556146e-01,  -4.53302294e-01,
        -3.95435962e-01,   3.99030986e-01,   5.95954887e-02,
         1.96760467e-01,   5.46825712e-01,  -3.20556382e-01,
         6.70924447e-02,   5.28358885e-02,   7.10268145e-02,
         5.32463876e-01,  -1.51364714e-01,  -1.51107901e-01,
         2.19537960e-01,  -6.36953890e-02,  -4.16855279e-01,
         1.75406650e-01,   1.20922879e-01,  -7.11677154e-01,
        -5.49249705e-01,   8.31139701e-02,  -1.95383216e-01,
         2.56361553e-02,   2.49905788e-01,  -2.60918706e-01,
        -3.37814817e-01,   2.08867132e-01,  -4.63535382e-01,
         5.62535791e-01,   1.91968314e-01,  -4.81198384e-02,
        -1.74769059e-01,  -7.16625940e-02,   1.33866287e-01,
        -4.77460076e-01,   1.44926718e-01,   3.06780666e-02,
        -5.15230690e-01,  -3.67544770e-01,  -2.76954275e-01,
        -2.29709359e-01,   2.11290067e-01,  -1.95002847e-01,
         1.26120635e-01,  -1.39695729e-01,  -5.46300222e-01,
        -6.61235925e-02,   3.96006677e-01,  -1.87230912e-01,
         3.88754268e-01,  -6.71059196e-02,  -1.18247290e-01,
         8.94976012e-02,   9.87890384e-02,   7.18786946e-03,
         9.09121401e-01,  -3.95437395e-02,  -3.66343360e-01,
        -4.43164674e-01,   6.29603398e-02,  -1.27507356e-01,
         4.29055956e-02,  -2.97388348e-01,   1.67357082e-01,
        -5.92526926e-02,   1.77081227e-01,  -5.83784681e-01,
        -3.00743272e-01,   9.10647827e-01,  -1.43176192e-01,
        -1.45194306e-01,  -2.75844039e-01,  -4.98541704e-01,
        -1.40220626e-01,  -3.22169470e-01,   3.08753911e-01,
        -5.18708954e-01,  -7.40386565e-02,  -1.86075355e-01,
         1.39570600e-01,  -2.34682496e-01,   6.43574492e-01,
         5.83472387e-01,   9.86620359e-02,  -7.59337608e-03,
        -1.95717032e-01,   6.09358294e-01,  -6.02127532e-01,
        -1.10993842e-02,  -4.83531902e-01,  -7.97007018e-01,
         6.37605261e-01,   8.55210406e-02,   1.15799553e-01,
        -1.09920833e-01,  -8.65339731e-01,  -2.58665567e-01,
        -5.35132088e-01,   5.99677895e-02,   7.97873290e-01,
         1.40541768e-01,  -4.04805266e-02,   1.87671078e-01,
        -4.07039358e-01,   1.09128909e-01,  -2.90045970e-01,
         2.32634851e-01,  -2.95310878e-01,  -1.36874883e-02,
        -1.79259283e-01,   1.86903018e-01,  -1.62556306e-01,
         2.56729785e-01,  -3.43399701e-01,   3.74714817e-02,
        -2.30109643e-01,  -7.86863631e-01,   5.59937213e-01,
         3.04877620e-01,  -2.38780962e-01,  -4.62393464e-02,
        -2.27846431e-01,   2.10415475e-01,   1.62330036e-02,
        -1.62802190e-01,   1.89961717e-02,   2.13476443e-01,
         1.94522466e-01,   3.30468273e-01,   2.18326553e-02,
         6.64341646e-02,   5.40361758e-02,   2.45339610e-01,
        -1.22892769e-01,  -1.98706989e-01,  -3.95123064e-01,
         5.68324641e-01,  -6.58557721e-02,  -2.04819220e-01,
        -3.46930396e-01,  -3.87332602e-01,  -7.14671516e-03,
        -8.15238450e-01,   6.49220758e-02,  -2.74216104e-01,
         1.27924184e-01,  -1.25108794e-01,  -1.41722757e-01,
        -2.10527057e-01,   1.85842329e-01,   3.97953885e-01,
        -1.61192233e-01,   7.38996691e-01,  -1.12416948e-01,
         5.60159257e-01,  -2.32684199e-01,  -2.04323323e-01,
        -5.92040748e-02,   5.47953556e-02,   4.49551621e-01,
        -1.67964711e-01,  -1.97009649e-01,  -1.59481073e-01,
         2.35198310e-01,  -2.42245091e-01,  -8.12731288e-01,
        -3.07659749e-01,  -2.04764511e-01,   2.34587793e-01,
        -5.05942880e-02,   5.00597665e-01,  -4.38318971e-02,
        -6.59357018e-01,  -1.93738414e-01,   3.86521985e-02,
         2.56902569e-01,  -1.66066045e-01,  -4.88775446e-01,
        -2.66582673e-01,   8.03949987e-02,   3.61010557e-02,
         6.05764558e-02,  -1.95206423e-01,   7.93142969e-02,
        -3.85865299e-01,  -6.19772493e-02,   1.37829044e-01,
        -3.63829794e-01,   6.21696025e-01,   9.75140657e-02,
         4.01367150e-02,  -1.45262310e-02,  -8.53563262e-02,
         1.78066616e-02,  -1.47777826e-01,   1.73320659e-01,
        -2.63674487e-01,   5.99438245e-02,   6.97315834e-01,
        -3.24900787e-01,   4.29986023e-01,   1.16771568e-01,
         4.80621403e-02,  -3.54953812e-03,   1.02378613e-03,
        -1.97619867e-01,  -8.44522888e-03,  -3.42460469e-01,
        -2.13662037e-01,   5.62269855e-01,   2.74705908e-01,
         4.70925332e-02,   6.22074880e-03,  -2.36558866e-01,
        -6.61460770e-01,   4.56882129e-01,   1.64041327e-01,
         2.51595113e-01,   3.83141639e-01,   1.21729438e-01,
         3.19214607e-01,  -1.87229408e-01,   9.16784373e-02,
         1.02446428e-01,  -9.24828360e-02,   3.73667285e-01,
         8.63248218e-02,  -6.82177529e-01,   8.20906208e-02,
        -1.10305458e-01,  -2.01299779e-01,  -4.28677634e-03,
         8.18032595e-02,   1.76509224e-01,  -4.47081663e-03,
        -6.55436999e-03,  -2.76801135e-01,  -1.10902384e-01,
         5.32734910e-01,   8.61176203e-02,   2.12917216e-02,
        -2.70386844e-01,  -3.71924367e-01,  -4.52705222e-01,
         1.04393432e-02,   2.49102605e-01,  -2.91865692e-01,
        -3.48792630e-02,  -2.75884781e-02,  -5.36246029e-01,
        -1.78663496e-01,  -5.89840423e-01,  -4.27710448e-01,
         2.10392161e-01,  -8.79764400e-02,   3.75240872e-01,
        -3.35302811e-01,   2.26891712e-01,   2.23450801e-02,
         7.10755209e-01,  -2.09825571e-01,   6.65670921e-02,
        -1.39961614e-01,  -1.46731828e-01,  -1.97167323e-01,
        -3.46320916e-02,   8.71826261e-01,  -1.11990626e-01,
        -3.86634987e-01,  -2.80629251e-01,   5.00050416e-01,
        -7.08069009e-01,  -5.28607563e-01,  -6.34232589e-01,
        -7.02311147e-02,  -4.58887673e-01,  -1.93843700e-01,
        -6.85200416e-02,   5.86426853e-01,   1.42462632e-01,
        -3.45994638e-02,  -3.68713165e-01,   4.46390812e-02,
        -3.48382199e-01,  -1.26416343e-01,  -2.37793252e-01,
         7.78960301e-01,  -6.41139794e-01,  -4.19027857e-01,
         1.66500784e-01,   3.71359012e-01,  -1.36912011e-01,
         1.45891067e-01,  -8.79914392e-02,   2.73283192e-01,
         3.45086919e-02,  -1.84904525e-01,  -3.67958258e-01,
        -1.20977485e-01,   2.12651154e-01,  -9.15193275e-01,
         3.05336678e-01,   2.16164523e-01,  -6.92019806e-01,
        -1.84000417e-01,   1.34111811e-01,  -2.24324798e-01,
         1.14294143e-01,   1.98077275e-02,   1.85300809e-01,
        -9.80619418e-03,  -1.92913398e-02,  -2.36539720e-01,
        -1.53115601e-01,   4.71581585e-01,   1.95345928e-01,
        -1.18000622e-01,   1.28501862e-02,   2.89400983e-01,
        -1.60259405e-01,   4.05649912e-02,  -3.11039094e-01,
        -3.24925075e-01,  -1.92796523e-01,   2.80936002e-01,
        -3.66222949e-01,  -1.86008175e-02,  -2.41665668e-01,
        -2.15444656e-01,  -2.56712599e-01,   4.30815282e-02,
         1.49472405e-01,   2.45861319e-01,   3.31598394e-01,
         2.88961121e-01,  -1.00728755e-01,  -8.41220387e-01,
        -5.56387902e-01,   1.95883108e-01,   2.70084799e-01,
         9.51934151e-02,  -1.37190931e-01,  -1.89847920e-01,
        -3.69557452e-02,   2.15921726e-01,   6.88752224e-02,
        -1.77870779e-01,  -1.42032923e-01,  -1.41398115e-01,
        -7.24783046e-01,  -8.81508110e-02,  -2.22352324e-01,
         2.35105027e-01,   3.12677398e-01,  -2.16755334e-01,
         8.32321826e-02,   3.22372369e-01,  -1.29006007e-02,
        -5.59726832e-01,  -2.37967379e-01,  -1.58324627e-01,
         2.93735064e-01,   7.08938163e-02,   2.20173947e-01,
        -1.53945705e-01,  -2.43049561e-01,   1.22305046e-01,
         1.94345392e-02,   8.37023722e-01,   1.68625461e-01,
        -3.39413568e-01,  -6.46710063e-02,  -9.93609720e-02,
         1.59217707e-01,   7.25952816e-01,  -5.36743153e-01,
        -5.26808149e-02,  -1.07497445e-01,  -3.39380175e-02,
        -1.79674765e-01,  -5.77894089e-01,   2.52163664e-01,
        -1.75021775e-01,  -2.27492093e-01,  -1.77003318e-01,
         2.42019045e-01,   4.92628065e-02,  -7.10352677e-01,
         9.27486270e-02,  -7.63502772e-01,  -2.37594582e-01,
        -1.56694333e-01,  -3.90884813e-01,  -4.63178973e-01,
         1.12490858e-01,   1.06569298e-01,  -1.73225999e-01,
         3.65239824e-01,   2.75756321e-01,   4.15858274e-01,
         1.17057418e-01,  -8.72119877e-02,  -4.58423118e-02,
        -3.83792685e-01,   1.62858781e-01,  -1.09083490e-01,
        -5.59982247e-02,  -5.71930757e-02,   1.70097365e-02,
        -4.40446015e-01,   4.21972996e-01,  -2.13903174e-01,
        -4.09337710e-02,   9.64410078e-02,  -1.13804160e-01,
        -1.78377711e-01,   3.43188490e-02,   4.79258663e-01,
        -6.84670885e-01,   8.63184856e-01,   4.39708886e-01,
         1.09757050e-01,   2.27416425e-01,   1.64164515e-01,
         7.60971405e-02,  -2.80810102e-01,  -2.56790041e-01,
         2.43297498e-01,   2.24202162e-01,   7.28094713e-01,
        -4.52712372e-01,  -1.76960780e-01,  -7.14768224e-01,
         3.85060057e-01,  -2.28117514e-01,   2.59149173e-01,
        -4.52388517e-01,  -3.20359435e-01,  -9.39943397e-02,
        -4.20438139e-01,  -1.70840616e-01,  -1.65965188e-02,
        -2.77320735e-01,  -4.80999355e-01,  -2.74983941e-01,
         2.06291797e-02,  -6.51333242e-01,   4.30654970e-02,
        -1.60389967e-01])
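
Because the stacked auto-encoder's final layer has 784 units, transform() returns a 784-dimensional vector per sample (as the output above shows), so the transformed features can be displayed next to the input. A minimal visualization sketch, assuming the features are 28x28 digit images:

# Hedged sketch (assumes 28x28 images): show an input sample next to its
# 784-dimensional auto-encoder output.
fig, axes = plt.subplots(1, 2, figsize=(6, 3))
axes[0].imshow(trainX[0].reshape(28, 28), cmap='gray')
axes[0].set_title('input')
axes[1].imshow(ae_trainX[0].reshape(28, 28), cmap='gray')
axes[1].set_title('auto-encoder output')
plt.show()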

In [ ]:
# Initialize a multi-layer perceptron on the encoded features
# (re-uses n from the earlier cells).
mymlp = mlp.Regressor(
            layers=[
                mlp.Layer("Rectifier", units=n),
                mlp.Layer("Softmax")],
            verbose=1,
            learning_rate=0.009,
            n_iter=2)

# Now perform supervised learning as usual.
t0 = time()
mymlp.fit(ae_trainX, trainY)
print "number of units : %s" % n
print "elapsed time : ", round(time()-t0, 3), "s"

preds = mymlp.predict(ae_testX)
print "accuracy score : %s" % accuracy_score(testY, preds)