In [1]:
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import train_test_split
from sklearn.metrics import classification_report
from sklearn.externals import joblib
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
from sknn.mlp import Classifier, Layer
from sknn.mlp import Classifier, Convolution, FastVectorSpace, Layer, MultiLayerPerceptron
import numpy as np
from time import time
from glob import glob
import os
In [2]:
# configure numpy display: 4 decimal places, no scientific notation
np.set_printoptions(precision=4, suppress=True)
In [3]:
### Plan2 ###
In [3]:
# Load the pre-extracted feature matrix and label vector, serialized
# with sklearn's joblib.
# NOTE(review): "lables" looks like a typo, but it must match the
# actual on-disk filename -- do not "fix" without renaming the file.
features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")
# Cast to compact numeric dtypes; features are re-cast to float32
# right before scaling in the next step.
features = np.array(features, 'int16')
labels = np.array(labels, 'int')
# Start the timer for the scaling step timed below.
t0 = time()
def scale(X, eps=0.001):
    """Min-max scale each column (predictor) of X into the range [0, 1].

    Parameters
    ----------
    X : ndarray
        2-D array of shape (n_samples, n_features).
    eps : float
        Small constant added to the denominator so constant
        (zero-range) columns do not cause division by zero.
    """
    col_min = np.min(X, axis=0)
    col_max = np.max(X, axis=0)
    # BUG FIX: the original divided by (max + eps), which only maps the
    # column into [0, 1] when the column minimum is 0; proper min-max
    # scaling divides by the column *range* (max - min + eps), matching
    # the documented intent.
    return (X - col_min) / (col_max - col_min + eps)
# cast to float32 so the min-max scaling below yields fractional values
features = features.astype("float32")
features = scale(features)
print "escape time : ", round(time()-t0, 3), "s"
# scale the data to the range [0, 1] and then construct the training
# and testing splits
# hold out 10% of the samples as a test set (random split)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size = 0.1)
# sanity-report the shapes and value ranges of both splits
print "the shape of training set %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "the shape of test set %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "the range of training set : %s ~ %s" %(trainX.min(),trainX.max())
print "the range of test set : %s ~ %s" %(testX.min(),testX.max())
In [10]:
# Classification
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
layers=[
Layer("Rectifier", units=300),
Layer("Softmax")],
learning_rate=0.01,
n_iter=10)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
In [11]:
# Classification
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
layers=[
Layer("Sigmoid", units=300),
Layer("Softmax")],
learning_rate=0.01,
n_iter=10)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
In [14]:
# Classification
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
layers=[
Layer("Tanh", units=300),
Layer("Softmax")],
learning_rate=0.01,
n_iter=10)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
In [13]:
# Classification
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Classifier(
layers=[
Layer("Maxout", units=300, pieces=2),
Layer("Softmax")],
learning_rate=0.01,
n_iter=10)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
In [6]:
# Grid Search
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
nn = Regressor(
layers=[
Layer("Rectifier", units=300),# 첫번째 히든레이어
Layer("Rectifier", units=300),# 두번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
verbose=1)
gs = GridSearchCV(nn, param_grid={
'learning_rate': [0.01, 0.05],
'n_iter' : [10, 20],
'hidden0__units': [300, 400],
'hidden0__type': ["Rectifier", "Tanh"], # 첫번째 히든레이어
'hidden1__units': [300, 400],
'hidden1__type': ["Rectifier", "Tanh"]}) # 두번째 히든레이어
gs.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = gs.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
In [4]:
# Grid Search
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=300), # 첫번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
verbose=2)
gs = GridSearchCV(nn, param_grid={
'learning_rate': [0.009],
'n_iter' : [10],
'hidden0__units': [100, 200, 300],
'hidden0__weight_decay' : [0.9, 0.09, 0.009, 0.0009, 0.00009],
'hidden0__type': ["Rectifier", "Tanh"] # 첫번째 히든레이어
})
gs.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = gs.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print gs.best_estimator_
print "escape time : ", round(time()-t0, 3), "s"
In [10]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [11]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [12]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [13]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [14]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [15]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [16]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 7번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [17]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 7번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 8번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [19]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Rectifier", units=200, weight_decay=0.00009), # 첫번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 두번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 세번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 네번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 다섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 여섯번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 7번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 8번째 히든레이어
Layer("Rectifier", units=200, weight_decay=0.00009), # 9번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [20]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 첫번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [23]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [26]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.0009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [27]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.0009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [28]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.0009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [29]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 6번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.0009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [30]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 6번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 7번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.0009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [32]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 3번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 4번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 5번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 6번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 7번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.0009, pieces=2), # 8번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.00009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"
In [33]:
# Rectifier, Sigmoid, Tanh, and Maxout for non-linear layers
# Linear, Softmax or Gaussian for linear layers
t0 = time()
nn = Classifier(
layers=[
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 1번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 2번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 3번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 4번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 5번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 6번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 7번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 8번째 히든레이어
Layer("Maxout", units=200, weight_decay=0.00009, pieces=2), # 9번째 히든레이어
Layer("Softmax")], # 아웃풋 레이어
learning_rate=0.00009,
n_iter=10,
verbose=2)
nn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification report
preds = nn.predict(testX)
print "accuracy score : %s" %(accuracy_score(testY, preds))
print "classification report : "
print classification_report(testY, preds)
print "escape time : ", round(time()-t0, 3), "s"