In [1]:
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import train_test_split
from sklearn.metrics import classification_report
from sklearn.externals import joblib
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
from sknn.mlp import Classifier, Layer
import numpy as np
from time import time
from glob import glob
import os

In [2]:
np.set_printoptions(precision=4)
np.set_printoptions(suppress=True)

In [3]:
### Plan2 ###

In [3]:
features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")

features = np.array(features, 'int16')
labels = np.array(labels, 'int')

t0 = time()
def scale(X, eps=0.001):
    # scale each column of the feature space (i.e., each predictor)
    # into the range [0, 1); note the denominator is the column max
    # plus eps, not the max-min range
    return (X - np.min(X, axis=0)) / (np.max(X, axis=0) + eps)

features = features.astype("float32")
features = scale(features)

print "escape time : ", round(time()-t0, 3), "s"

# construct the training and testing splits (90% train / 10% test)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size = 0.1)
print "shape of the training set : %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "shape of the test set : %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "range of the training set : %s ~ %s" %(trainX.min(), trainX.max())
print "range of the test set : %s ~ %s" %(testX.min(), testX.max())


elapsed time :  0.063 s
shape of the training set : 5400 rows, 784 columns
shape of the test set : 600 rows, 784 columns
range of the training set : 0.0 ~ 0.999996
range of the test set : 0.0 ~ 0.999996
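
As a side note, the custom scale() above divides by the column max plus eps rather than by the max-min range; scikit-learn's MinMaxScaler is the conventional way to get the same [0, 1] range. A minimal sketch (applied to a raw, unscaled feature matrix X, not to the already-scaled features):

from sklearn.preprocessing import MinMaxScaler

scaler = MinMaxScaler(feature_range=(0, 1))
X_scaled = scaler.fit_transform(X.astype("float32"))  # X: raw features, e.g. the int16 array loaded above
print "range after MinMaxScaler : %s ~ %s" %(X_scaled.min(), X_scaled.max())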

In [10]:
# Classification 
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
    layers=[
        Layer("Rectifier", units=300),
        Layer("Softmax")],
    learning_rate=0.01,
    n_iter=10)
nn.fit(trainX, trainY)

# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)


accuracy score : 0.925
classification report : 
             precision    recall  f1-score   support

          0       0.94      0.94      0.94        64
          1       0.92      0.95      0.93        96
          2       0.88      0.91      0.89        56
          3       0.92      0.87      0.90        55
          4       0.99      0.93      0.96        75
          5       0.87      0.97      0.92        62
          6       0.91      0.71      0.79        41
          7       0.95      1.00      0.97        92
          8       0.93      0.92      0.92        59

avg / total       0.93      0.93      0.92       600
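
confusion_matrix is imported at the top but never used; it complements the per-class report by showing which digits are mistaken for which. A minimal sketch on the predictions above:

cm = confusion_matrix(testY, preds)
print "confusion matrix : "
print cm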


In [11]:
# Classification 
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
    layers=[
        Layer("Sigmoid", units=300),
        Layer("Softmax")],
    learning_rate=0.01,
    n_iter=10)
nn.fit(trainX, trainY)

# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)


accuracy score : 0.923333333333
classification report : 
             precision    recall  f1-score   support

          0       0.94      0.95      0.95        64
          1       0.90      0.95      0.92        96
          2       0.91      0.89      0.90        56
          3       0.96      0.87      0.91        55
          4       0.99      0.93      0.96        75
          5       0.84      0.92      0.88        62
          6       0.97      0.71      0.82        41
          7       0.92      1.00      0.96        92
          8       0.93      0.95      0.94        59

avg / total       0.93      0.92      0.92       600


In [14]:
# Classification 
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
    layers=[
        Layer("Tanh", units=300),
        Layer("Softmax")],
    learning_rate=0.01,
    n_iter=10)
nn.fit(trainX, trainY)

# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)


accuracy score : 0.928333333333
classification report : 
             precision    recall  f1-score   support

          0       0.95      0.94      0.94        64
          1       0.92      0.95      0.93        96
          2       0.96      0.89      0.93        56
          3       0.89      0.91      0.90        55
          4       0.99      0.93      0.96        75
          5       0.86      0.95      0.90        62
          6       0.94      0.71      0.81        41
          7       0.93      1.00      0.96        92
          8       0.93      0.95      0.94        59

avg / total       0.93      0.93      0.93       600


In [13]:
# Classification 
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
    layers=[
        Layer("Maxout", units=300, pieces=2),
        Layer("Softmax")],
    learning_rate=0.01,
    n_iter=10)
nn.fit(trainX, trainY)

# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)


accuracy score : 0.928333333333
classification report : 
             precision    recall  f1-score   support

          0       0.92      0.95      0.94        64
          1       0.91      0.95      0.93        96
          2       0.91      0.91      0.91        56
          3       0.91      0.89      0.90        55
          4       0.99      0.93      0.96        75
          5       0.92      0.92      0.92        62
          6       0.88      0.73      0.80        41
          7       0.93      1.00      0.96        92
          8       0.97      0.95      0.96        59

avg / total       0.93      0.93      0.93       600

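
Taking the accuracy scores from the four single-hidden-layer runs above (Rectifier 0.925, Sigmoid 0.923, Tanh 0.928, Maxout 0.928), the activations differ by at most about half a percentage point here. A sketch that runs the same comparison in one loop instead of four cells:

for activation in ["Rectifier", "Sigmoid", "Tanh", "Maxout"]:
    extra = {"pieces": 2} if activation == "Maxout" else {}
    nn = Classifier(
        layers=[Layer(activation, units=300, **extra),
                Layer("Softmax")],
        learning_rate=0.01,
        n_iter=10)
    nn.fit(trainX, trainY)
    print "%s : %s" %(activation, accuracy_score(testY, nn.predict(testX)))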

In [6]:
# Grid Search
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

# NOTE: Regressor (never imported at the top of this notebook) is used here
# by mistake on a classification problem; the near-random accuracy below,
# with every sample predicted as class 1, reflects that. The corrected
# Classifier-based search follows in the next cell.
from sknn.mlp import Regressor

nn = Regressor(
    layers=[
        Layer("Rectifier", units=300), # first hidden layer
        Layer("Rectifier", units=300), # second hidden layer
        Layer("Softmax")], # output layer
    verbose=1)

gs = GridSearchCV(nn, param_grid={
    'learning_rate': [0.01, 0.05],
    'n_iter' : [10, 20],
    'hidden0__units': [300, 400],
    'hidden0__type': ["Rectifier", "Tanh"], # first hidden layer
    'hidden1__units': [300, 400],
    'hidden1__type': ["Rectifier", "Tanh"]}) # second hidden layer
gs.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = gs.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)


accuracy score : 0.161666666667
classification report : 
             precision    recall  f1-score   support

          0       0.00      0.00      0.00        61
          1       0.16      1.00      0.28        97
          2       0.00      0.00      0.00        73
          3       0.00      0.00      0.00        59
          4       0.00      0.00      0.00        64
          5       0.00      0.00      0.00        56
          6       0.00      0.00      0.00        44
          7       0.00      0.00      0.00        88
          8       0.00      0.00      0.00        58

avg / total       0.03      0.16      0.04       600


In [4]:
# Grid Search
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=300), # 첫번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    verbose=2)

gs = GridSearchCV(nn, param_grid={
    'learning_rate': [0.009],
    'n_iter' : [10],
    'hidden0__units': [100, 200, 300],
    'hidden0__weight_decay' : [0.9, 0.09, 0.009, 0.0009, 0.00009],
    'hidden0__type': ["Rectifier", "Tanh"] # first hidden layer
    })
gs.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = gs.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print gs.best_estimator_
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.93
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.92      0.94      0.93       105
          2       0.98      0.90      0.94        61
          3       0.94      0.91      0.92        53
          4       0.95      0.94      0.95        86
          5       0.88      0.98      0.93        59
          6       0.71      0.80      0.75        30
          7       0.99      0.92      0.95        76
          8       0.95      0.94      0.94        63

avg / total       0.93      0.93      0.93       600

Classifier(batch_size=1, debug=False, dropout_rate=None, f_stable=0.001,
      hidden0=<sknn.nn.Layer `Rectifier`: name=u'hidden0', units=200, weight_decay=9e-05>,
      layers=[<sknn.nn.Layer `Rectifier`: name=u'hidden0', units=200, weight_decay=9e-05>, <sknn.nn.Layer `Softmax`: name=u'output', units=9>],
      learning_momentum=0.9, learning_rate=0.009, learning_rule=u'sgd',
      loss_type=u'mse', n_iter=10, n_stable=50,
      output=<sknn.nn.Layer `Softmax`: name=u'output', units=9>,
      random_state=None, regularize=None, valid_set=None, valid_size=0.0,
      verbose=2, weight_decay=None)
elapsed time :  2534.717 s
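
Beyond best_estimator_, the sklearn.grid_search API also exposes gs.best_params_, gs.best_score_ and the per-combination results in gs.grid_scores_, which help judge how sensitive the score is to units, type and weight_decay. A minimal sketch, run after gs.fit:

print "best params : %s" %(gs.best_params_)
print "best CV score : %s" %(gs.best_score_)
for entry in gs.grid_scores_:
    print entry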

In [10]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.93
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.92      0.94      0.93       105
          2       0.98      0.90      0.94        61
          3       0.94      0.91      0.92        53
          4       0.95      0.94      0.95        86
          5       0.88      0.98      0.93        59
          6       0.71      0.80      0.75        30
          7       0.99      0.92      0.95        76
          8       0.95      0.94      0.94        63

avg / total       0.93      0.93      0.93       600

elapsed time :  33.315 s
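
Since joblib is already imported for loading the data, the retrained network can be persisted the same way; a sketch (the filename is just an example):

joblib.dump(nn, "./mldata/nn_rectifier200.pkl")
# later: nn = joblib.load("./mldata/nn_rectifier200.pkl"); preds = nn.predict(testX)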

In [11]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.931666666667
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.93      0.94      0.94       105
          2       0.96      0.90      0.93        61
          3       0.94      0.91      0.92        53
          4       0.95      0.95      0.95        86
          5       0.85      0.98      0.91        59
          6       0.73      0.80      0.76        30
          7       0.99      0.92      0.95        76
          8       0.97      0.94      0.95        63

avg / total       0.93      0.93      0.93       600

elapsed time :  44.577 s

In [12]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.933333333333
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.93      0.95      0.94       105
          2       1.00      0.90      0.95        61
          3       0.94      0.91      0.92        53
          4       0.98      0.95      0.96        86
          5       0.88      0.97      0.92        59
          6       0.66      0.83      0.74        30
          7       0.99      0.92      0.95        76
          8       0.95      0.94      0.94        63

avg / total       0.94      0.93      0.93       600

elapsed time :  57.057 s

In [13]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.933333333333
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.93      0.94      0.93       105
          2       1.00      0.92      0.96        61
          3       0.94      0.92      0.93        53
          4       0.94      0.95      0.95        86
          5       0.91      0.98      0.94        59
          6       0.67      0.80      0.73        30
          7       0.97      0.92      0.95        76
          8       0.98      0.92      0.95        63

avg / total       0.94      0.93      0.93       600

elapsed time :  68.718 s

In [14]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.93
classification report : 
             precision    recall  f1-score   support

          0       0.98      0.94      0.96        67
          1       0.93      0.94      0.94       105
          2       0.98      0.90      0.94        61
          3       0.89      0.91      0.90        53
          4       0.96      0.95      0.96        86
          5       0.89      0.98      0.94        59
          6       0.63      0.80      0.71        30
          7       0.96      0.92      0.94        76
          8       1.00      0.94      0.97        63

avg / total       0.94      0.93      0.93       600

elapsed time :  82.627 s

In [15]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.928333333333
classification report : 
             precision    recall  f1-score   support

          0       0.97      0.94      0.95        67
          1       0.93      0.94      0.94       105
          2       0.96      0.89      0.92        61
          3       0.92      0.91      0.91        53
          4       0.96      0.94      0.95        86
          5       0.91      0.98      0.94        59
          6       0.67      0.80      0.73        30
          7       0.97      0.92      0.95        76
          8       0.92      0.95      0.94        63

avg / total       0.93      0.93      0.93       600

elapsed time :  94.971 s

In [16]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 7번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.926666666667
classification report : 
             precision    recall  f1-score   support

          0       0.98      0.94      0.96        67
          1       0.93      0.94      0.93       105
          2       1.00      0.89      0.94        61
          3       0.91      0.92      0.92        53
          4       0.96      0.94      0.95        86
          5       0.88      0.97      0.92        59
          6       0.62      0.83      0.71        30
          7       0.96      0.92      0.94        76
          8       0.98      0.92      0.95        63

avg / total       0.93      0.93      0.93       600

elapsed time :  106.679 s

In [17]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 7번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 8번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.926666666667
classification report : 
             precision    recall  f1-score   support

          0       1.00      0.94      0.97        67
          1       0.93      0.93      0.93       105
          2       0.98      0.90      0.94        61
          3       0.96      0.91      0.93        53
          4       0.96      0.95      0.96        86
          5       0.83      0.98      0.90        59
          6       0.63      0.80      0.71        30
          7       0.97      0.92      0.95        76
          8       0.95      0.92      0.94        63

avg / total       0.93      0.93      0.93       600

elapsed time :  115.55 s

In [19]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 7번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 8번째 히든레이어
        Layer("Rectifier", units=200, weight_decay=0.00009), # 9번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.921666666667
classification report : 
             precision    recall  f1-score   support

          0       0.97      0.94      0.95        67
          1       0.93      0.94      0.94       105
          2       0.96      0.90      0.93        61
          3       0.94      0.91      0.92        53
          4       0.96      0.94      0.95        86
          5       0.85      0.98      0.91        59
          6       0.55      0.70      0.62        30
          7       0.96      0.92      0.94        76
          8       1.00      0.92      0.96        63

avg / total       0.93      0.92      0.92       600

elapsed time :  135.532 s
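
Across this depth sweep (one to nine Rectifier hidden layers, same hyperparameters), test accuracy peaks around 0.933 at three to four layers and drifts down to about 0.922 by nine, while elapsed time grows roughly linearly with depth. The sweep can be written as a loop instead of one cell per depth; a sketch:

for depth in range(1, 10):
    hidden = [Layer("Rectifier", units=200, weight_decay=0.00009)
              for _ in range(depth)]
    nn = Classifier(layers=hidden + [Layer("Softmax")],
                    learning_rate=0.009, n_iter=10)
    nn.fit(trainX, trainY)
    print "%s hidden layer(s) : %s" %(depth, accuracy_score(testY, nn.predict(testX)))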

In [20]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 첫번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.93
classification report : 
             precision    recall  f1-score   support

          0       0.94      0.96      0.95        67
          1       0.93      0.94      0.94       105
          2       0.98      0.89      0.93        61
          3       0.94      0.91      0.92        53
          4       0.96      0.95      0.96        86
          5       0.85      0.98      0.91        59
          6       0.71      0.80      0.75        30
          7       0.99      0.92      0.95        76
          8       0.95      0.94      0.94        63

avg / total       0.93      0.93      0.93       600

elapsed time :  79.859 s

In [23]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.926666666667
classification report : 
             precision    recall  f1-score   support

          0       0.89      0.97      0.93        67
          1       0.92      0.94      0.93       105
          2       0.98      0.89      0.93        61
          3       0.92      0.91      0.91        53
          4       0.96      0.94      0.95        86
          5       0.88      0.98      0.93        59
          6       0.71      0.73      0.72        30
          7       0.97      0.92      0.95        76
          8       1.00      0.94      0.97        63

avg / total       0.93      0.93      0.93       600

elapsed time :  77.639 s

In [26]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.0009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.935
classification report : 
             precision    recall  f1-score   support

          0       0.97      0.96      0.96        67
          1       0.93      0.94      0.93       105
          2       0.97      0.93      0.95        61
          3       0.92      0.92      0.92        53
          4       0.97      0.97      0.97        86
          5       0.90      0.95      0.93        59
          6       0.69      0.80      0.74        30
          7       0.97      0.92      0.95        76
          8       0.98      0.94      0.96        63

avg / total       0.94      0.94      0.94       600

elapsed time :  120.764 s

In [27]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.0009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.926666666667
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.93      0.94      0.93       105
          2       0.95      0.90      0.92        61
          3       0.89      0.92      0.91        53
          4       0.93      0.97      0.95        86
          5       0.88      0.90      0.89        59
          6       0.73      0.80      0.76        30
          7       0.99      0.92      0.95        76
          8       0.98      0.94      0.96        63

avg / total       0.93      0.93      0.93       600

elapsed time :  159.644 s

In [28]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.0009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.925
classification report : 
             precision    recall  f1-score   support

          0       0.94      0.97      0.96        67
          1       0.94      0.94      0.94       105
          2       0.93      0.93      0.93        61
          3       0.89      0.91      0.90        53
          4       0.96      0.94      0.95        86
          5       0.87      0.90      0.88        59
          6       0.69      0.80      0.74        30
          7       0.97      0.92      0.95        76
          8       0.98      0.92      0.95        63

avg / total       0.93      0.93      0.93       600

elapsed time :  157.876 s

In [29]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 6번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.0009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.936666666667
classification report : 
             precision    recall  f1-score   support

          0       0.97      0.97      0.97        67
          1       0.93      0.94      0.94       105
          2       0.98      0.93      0.96        61
          3       0.93      0.94      0.93        53
          4       0.95      0.95      0.95        86
          5       0.90      0.97      0.93        59
          6       0.66      0.77      0.71        30
          7       0.99      0.92      0.95        76
          8       0.98      0.94      0.96        63

avg / total       0.94      0.94      0.94       600

elapsed time :  185.98 s

In [30]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 6번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 7번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.0009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.925
classification report : 
             precision    recall  f1-score   support

          0       0.90      0.96      0.93        67
          1       0.94      0.94      0.94       105
          2       0.89      0.90      0.89        61
          3       0.89      0.91      0.90        53
          4       0.96      0.95      0.96        86
          5       0.92      0.93      0.92        59
          6       0.80      0.80      0.80        30
          7       0.97      0.92      0.95        76
          8       0.95      0.92      0.94        63

avg / total       0.93      0.93      0.93       600

elapsed time :  230.662 s

In [32]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 3번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 4번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 5번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 6번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 7번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 8번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.00009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.921666666667
classification report : 
             precision    recall  f1-score   support

          0       0.96      0.96      0.96        67
          1       0.92      0.94      0.93       105
          2       0.92      0.92      0.92        61
          3       0.87      0.89      0.88        53
          4       0.96      0.94      0.95        86
          5       0.92      0.92      0.92        59
          6       0.70      0.77      0.73        30
          7       0.97      0.92      0.95        76
          8       0.95      0.94      0.94        63

avg / total       0.92      0.92      0.92       600

elapsed time :  243.594 s

In [33]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers

t0 = time()

nn = Classifier(
    layers=[
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 6번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 7번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 8번째 히든레이어
        Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 9번째 히든레이어
        Layer("Softmax")], # 아웃풋 레이어
    learning_rate=0.00009,
    n_iter=10,
    verbose=2)

nn.fit(trainX, trainY)


# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)

print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"


accuracy score : 0.923333333333
classification report : 
             precision    recall  f1-score   support

          0       0.94      0.94      0.94        67
          1       0.90      0.94      0.92       105
          2       0.89      0.90      0.89        61
          3       0.98      0.89      0.93        53
          4       0.98      0.94      0.96        86
          5       0.88      0.95      0.91        59
          6       0.71      0.80      0.75        30
          7       0.97      0.92      0.95        76
          8       0.98      0.94      0.96        63

avg / total       0.93      0.92      0.92       600

elapsed time :  278.269 s
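
Summarizing the Maxout depth sweep by the accuracy scores reported above: [0.9300, 0.9267, 0.9350, 0.9267, 0.9250, 0.9367, 0.9250, 0.9217, 0.9233] for one through nine hidden layers. The three- and six-layer runs come out best, but note that the learning rate was lowered mid-sweep (0.009 for depths 1-2, 0.0009 for 3-7, 0.00009 for 8-9), so this is not a single-variable comparison. A sketch that plots the curve (assuming matplotlib is available):

import matplotlib.pyplot as plt

depths = range(1, 10)
accs = [0.9300, 0.9267, 0.9350, 0.9267, 0.9250, 0.9367, 0.9250, 0.9217, 0.9233]
plt.plot(depths, accs, marker="o")
plt.xlabel("number of Maxout hidden layers")
plt.ylabel("test accuracy")
plt.title("Maxout depth sweep (units=200, pieces=2, n_iter=10)")
plt.show()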