In [41]:
from IPython.display import Image
import pickle

# Download and unzip pickled version from here: http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz
(train_x, train_y), (valid_x, valid_y), (test_x, test_y) = pickle.load(open('data/mnist.pkl', 'r'))
print "Shapes:"
print train_x.shape, train_y.shape
print valid_x.shape, valid_y.shape
print test_x.shape, test_y.shape

print "--------------"
print "Example input:"
print train_x[0]
print "Example label:"
print train_y[0]


 Shapes:
(50000, 784) (50000,)
(10000, 784) (10000,)
(10000, 784) (10000,)
--------------
Example input:
[ 0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.01171875
  0.0703125   0.0703125   0.0703125   0.4921875   0.53125     0.68359375
  0.1015625   0.6484375   0.99609375  0.96484375  0.49609375  0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.1171875   0.140625    0.3671875
  0.6015625   0.6640625   0.98828125  0.98828125  0.98828125  0.98828125
  0.98828125  0.87890625  0.671875    0.98828125  0.9453125   0.76171875
  0.25        0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.19140625
  0.9296875   0.98828125  0.98828125  0.98828125  0.98828125  0.98828125
  0.98828125  0.98828125  0.98828125  0.98046875  0.36328125  0.3203125
  0.3203125   0.21875     0.15234375  0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.0703125   0.85546875  0.98828125  0.98828125  0.98828125
  0.98828125  0.98828125  0.7734375   0.7109375   0.96484375  0.94140625
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.3125      0.609375
  0.41796875  0.98828125  0.98828125  0.80078125  0.04296875  0.
  0.16796875  0.6015625   0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.0546875   0.00390625  0.6015625   0.98828125  0.3515625   0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.54296875  0.98828125  0.7421875   0.0078125   0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.04296875  0.7421875   0.98828125  0.2734375   0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.13671875  0.94140625  0.87890625
  0.625       0.421875    0.00390625  0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.31640625
  0.9375      0.98828125  0.98828125  0.46484375  0.09765625  0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.17578125  0.7265625   0.98828125  0.98828125  0.5859375   0.10546875
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.0625      0.36328125  0.984375    0.98828125
  0.73046875  0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.97265625
  0.98828125  0.97265625  0.25        0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.1796875   0.5078125   0.71484375
  0.98828125  0.98828125  0.80859375  0.0078125   0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.15234375  0.578125    0.89453125  0.98828125
  0.98828125  0.98828125  0.9765625   0.7109375   0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.09375     0.4453125   0.86328125  0.98828125  0.98828125
  0.98828125  0.98828125  0.78515625  0.3046875   0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.08984375  0.2578125   0.83203125  0.98828125  0.98828125  0.98828125
  0.98828125  0.7734375   0.31640625  0.0078125   0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.0703125
  0.66796875  0.85546875  0.98828125  0.98828125  0.98828125  0.98828125
  0.76171875  0.3125      0.03515625  0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.21484375
  0.671875    0.8828125   0.98828125  0.98828125  0.98828125  0.98828125
  0.953125    0.51953125  0.04296875  0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.53125     0.98828125  0.98828125  0.98828125  0.828125    0.52734375
  0.515625    0.0625      0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.        ]
Example label:
5

In [42]:
# Show example images - using tile_raster_images helper function from OpenDeep to get 28x28 image from 784 array.
from utils import tile_raster_images
from PIL import Image as pil_img

# Lay the first 25 training digits out as a single row of 28x28 tiles.
input_images = train_x[:25]
tiled_array = tile_raster_images(
    input_images,
    img_shape=(28, 28),
    tile_shape=(1, 25),
    tile_spacing=(1, 1),
)
im = pil_img.fromarray(tiled_array)
im.save("some_mnist_numbers.png")
# Last expression: render the saved PNG inline in the notebook.
Image(filename="some_mnist_numbers.png")


Out[42]:

In [43]:
# Your basic Theano imports.
import theano
import theano.tensor as T

# Symbolic input: a minibatch of flattened 28x28 images, one 784-dim row per example.
x = T.matrix('x')

In [44]:
# Compute the hidden layer from the input
import numpy
import numpy.random as rng

# Uniform init bound sqrt(6 / (fan_in + fan_out)) for the 784 -> 500 layer.
init_bound = numpy.sqrt(6. / (784+500))
# Alternative gaussian initialization, kept for reference:
# W_x = numpy.asarray(rng.normal(loc=0.0, scale=.05, size=(28*28, 500)), dtype=theano.config.floatX)
W_x = theano.shared(
    numpy.asarray(rng.uniform(low=-init_bound, high=init_bound, size=(28*28, 500)),
                  dtype=theano.config.floatX),
    name="W_x"
)
# Hidden bias starts at zero.
b_h = theano.shared(
    numpy.zeros(shape=(500,), dtype=theano.config.floatX),
    name="b_h"
)

# Hidden activations: tanh(x . W_x + b_h).
h = T.tanh(T.dot(x, W_x) + b_h)

In [45]:
# Compute the output class probabilities from the hidden layer
# Uniform init bound sqrt(6 / (fan_in + fan_out)) = sqrt(6 / (500 + 10)).
i = numpy.sqrt(6. / (510))
# W_h = numpy.asarray(rng.normal(loc=0.0, scale=.05, size=(500, 10)), dtype=theano.config.floatX)
W_h = numpy.asarray(rng.uniform(low=-i, high=i, size=(500, 10)), dtype=theano.config.floatX)
# Use theano.config.floatX (not a hard-coded "float32") so the bias dtype stays
# consistent with W_x, b_h, W_h even when floatX is configured differently.
b_y = numpy.zeros(shape=(10,), dtype=theano.config.floatX)

W_h = theano.shared(W_h, name="W_h")
b_y = theano.shared(b_y, name="b_y")

# Class probabilities: softmax over the 10 digit classes, one row per example.
y = T.nnet.softmax(
    T.dot(h, W_h) + b_y
)

# The actual predicted label
y_hat = T.argmax(y, axis=1)

In [46]:
# Find cost compared to correct labels
correct_labels = T.ivector("labels")

# Negative mean log-likelihood of the true class (categorical cross-entropy):
# for every row, index out the log-probability assigned to its correct label.
row_idx = T.arange(correct_labels.shape[0])
log_likelihood = T.log(y)[row_idx, correct_labels]
cost = -T.mean(log_likelihood)

In [47]:
# Compute gradient updates for the parameters
parameters = [W_x, b_h, W_h, b_y]
gradients = T.grad(cost, parameters)

# Vanilla SGD: each parameter steps against its gradient.
learning_rate = 0.01
train_updates = []
for param, gradient in zip(parameters, gradients):
    train_updates.append((param, param - learning_rate * gradient))

In [48]:
# Compile function for training (changes parameters via updates) and testing (no updates)
# f_train performs one SGD step: applies `train_updates` as a side effect and
# returns the minibatch cost. allow_input_downcast lets float64 numpy inputs be
# silently cast down to floatX instead of raising.
f_train = theano.function(
    inputs=[x, correct_labels], 
    outputs=cost, 
    updates=train_updates, 
    allow_input_downcast=True
)

# f_test is a pure forward pass: returns the argmax predicted label per example,
# with no parameter updates.
f_test = theano.function(
    inputs=[x], 
    outputs=y_hat, 
    allow_input_downcast=True
)

In [49]:
# Main training loop
batch_size = 100
epochs = 300
check_frequency = 3

train_batches = len(train_x) / batch_size
valid_batches = len(valid_x) / batch_size
test_batches = len(test_x) / batch_size

for epoch in range(epochs):
    print epoch+1, ":",
    
    train_costs = []
    train_accuracy = []
    for i in range(train_batches):
        batch_x = train_x[i*batch_size:(i+1)*batch_size]
        batch_labels = train_y[i*batch_size:(i+1)*batch_size]

        costs = f_train(batch_x, batch_labels)
        preds = f_test(batch_x)
        acc = sum(preds==batch_labels)/float(len(batch_labels))
        
        train_costs.append(costs)
        train_accuracy.append(acc)
    print "cost:", numpy.mean(train_costs), "\ttrain:", str(numpy.mean(train_accuracy)*100)+"%",
    
    valid_accuracy = []
    for i in range(valid_batches):
        batch_x = valid_x[i*batch_size:(i+1)*batch_size]
        batch_labels = valid_y[i*batch_size:(i+1)*batch_size]
        
        preds = f_test(batch_x)
        acc = sum(preds==batch_labels)/float(len(batch_labels))
        
        valid_accuracy.append(acc)
    print "\tvalid:", str(numpy.mean(valid_accuracy)*100)+"%",
    
    test_accuracy = []
    for i in range(test_batches):
        batch_x = test_x[i*batch_size:(i+1)*batch_size]
        batch_labels = test_y[i*batch_size:(i+1)*batch_size]
        
        preds = f_test(batch_x)
        acc = sum(preds==batch_labels)/float(len(batch_labels))
        
        test_accuracy.append(acc)
    print "\ttest:", str(numpy.mean(test_accuracy)*100)+"%"
    
    if (epoch+1) % check_frequency == 0:
        print 'saving filters...'
        weight_filters = pil_img.fromarray(
                tile_raster_images(
                    W_x.get_value(borrow=True).T,
                    img_shape=(28, 28),
                    tile_shape=(20, 25),
                    tile_spacing=(1, 1)
                )
            )
        weight_filters.save("mlp_filters_%d.png"%(epoch+1))


1 : cost: 0.910093 	train: 79.788% 	valid: 88.05% 	test: 87.49%
2 : cost: 0.492869 	train: 87.558% 	valid: 89.6% 	test: 88.99%
3 : cost: 0.419445 	train: 88.848% 	valid: 90.26% 	test: 89.9%
saving filters...
4 : cost: 0.38414 	train: 89.536% 	valid: 90.67% 	test: 90.37%
5 : cost: 0.362308 	train: 89.99% 	valid: 91.1% 	test: 90.69%
6 : cost: 0.346996 	train: 90.424% 	valid: 91.34% 	test: 90.95%
saving filters...
7 : cost: 0.335397 	train: 90.724% 	valid: 91.51% 	test: 91.15%
8 : cost: 0.326137 	train: 90.982% 	valid: 91.76% 	test: 91.42%
9 : cost: 0.318454 	train: 91.17% 	valid: 91.89% 	test: 91.65%
saving filters...
10 : cost: 0.311887 	train: 91.354% 	valid: 92.02% 	test: 91.8%
11 : cost: 0.306141 	train: 91.554% 	valid: 92.08% 	test: 91.98%
12 : cost: 0.301014 	train: 91.684% 	valid: 92.2% 	test: 92.04%
saving filters...
13 : cost: 0.296363 	train: 91.804% 	valid: 92.22% 	test: 92.08%
14 : cost: 0.292087 	train: 91.922% 	valid: 92.22% 	test: 92.17%
15 : cost: 0.288107 	train: 92.034% 	valid: 92.31% 	test: 92.23%
saving filters...
16 : cost: 0.284364 	train: 92.126% 	valid: 92.35% 	test: 92.32%
17 : cost: 0.280813 	train: 92.228% 	valid: 92.4% 	test: 92.39%
18 : cost: 0.277417 	train: 92.332% 	valid: 92.45% 	test: 92.44%
saving filters...
19 : cost: 0.274148 	train: 92.442% 	valid: 92.52% 	test: 92.48%
20 : cost: 0.270981 	train: 92.53% 	valid: 92.6% 	test: 92.57%
21 : cost: 0.267898 	train: 92.618% 	valid: 92.64% 	test: 92.67%
saving filters...
22 : cost: 0.264884 	train: 92.734% 	valid: 92.73% 	test: 92.72%
23 : cost: 0.261926 	train: 92.814% 	valid: 92.85% 	test: 92.8%
24 : cost: 0.259013 	train: 92.89% 	valid: 92.93% 	test: 92.93%
saving filters...
25 : cost: 0.256139 	train: 92.984% 	valid: 93.05% 	test: 92.95%
26 : cost: 0.253295 	train: 93.068% 	valid: 93.16% 	test: 92.98%
27 : cost: 0.250478 	train: 93.148% 	valid: 93.22% 	test: 93.08%
saving filters...
28 : cost: 0.247683 	train: 93.214% 	valid: 93.29% 	test: 93.14%
29 : cost: 0.244908 	train: 93.292% 	valid: 93.42% 	test: 93.14%
30 : cost: 0.24215 	train: 93.374% 	valid: 93.53% 	test: 93.21%
saving filters...
31 : cost: 0.239409 	train: 93.462% 	valid: 93.61% 	test: 93.26%
32 : cost: 0.236685 	train: 93.538% 	valid: 93.72% 	test: 93.36%
33 : cost: 0.233977 	train: 93.642% 	valid: 93.79% 	test: 93.39%
saving filters...
34 : cost: 0.231286 	train: 93.712% 	valid: 93.86% 	test: 93.43%
35 : cost: 0.228612 	train: 93.782% 	valid: 93.94% 	test: 93.47%
36 : cost: 0.225956 	train: 93.858% 	valid: 94.06% 	test: 93.55%
saving filters...
37 : cost: 0.223321 	train: 93.916% 	valid: 94.19% 	test: 93.62%
38 : cost: 0.220706 	train: 94.0% 	valid: 94.35% 	test: 93.72%
39 : cost: 0.218114 	train: 94.068% 	valid: 94.43% 	test: 93.84%
saving filters...
40 : cost: 0.215546 	train: 94.152% 	valid: 94.48% 	test: 93.89%
41 : cost: 0.213002 	train: 94.232% 	valid: 94.54% 	test: 93.96%
42 : cost: 0.210484 	train: 94.286% 	valid: 94.61% 	test: 94.05%
saving filters...
43 : cost: 0.207995 	train: 94.368% 	valid: 94.69% 	test: 94.09%
44 : cost: 0.205533 	train: 94.46% 	valid: 94.75% 	test: 94.15%
45 : cost: 0.203102 	train: 94.514% 	valid: 94.81% 	test: 94.22%
saving filters...
46 : cost: 0.2007 	train: 94.576% 	valid: 94.91% 	test: 94.29%
47 : cost: 0.19833 	train: 94.64% 	valid: 94.94% 	test: 94.38%
48 : cost: 0.195992 	train: 94.71% 	valid: 94.99% 	test: 94.45%
saving filters...
49 : cost: 0.193687 	train: 94.764% 	valid: 95.02% 	test: 94.49%
50 : cost: 0.191414 	train: 94.826% 	valid: 95.06% 	test: 94.53%
51 : cost: 0.189174 	train: 94.872% 	valid: 95.13% 	test: 94.56%
saving filters...
52 : cost: 0.186968 	train: 94.924% 	valid: 95.18% 	test: 94.61%
53 : cost: 0.184796 	train: 94.982% 	valid: 95.18% 	test: 94.65%
54 : cost: 0.182657 	train: 95.036% 	valid: 95.24% 	test: 94.68%
saving filters...
55 : cost: 0.180553 	train: 95.088% 	valid: 95.3% 	test: 94.68%
56 : cost: 0.178481 	train: 95.134% 	valid: 95.34% 	test: 94.78%
57 : cost: 0.176444 	train: 95.194% 	valid: 95.38% 	test: 94.83%
saving filters...
58 : cost: 0.174439 	train: 95.24% 	valid: 95.41% 	test: 94.9%
59 : cost: 0.172468 	train: 95.288% 	valid: 95.4% 	test: 94.98%
60 : cost: 0.170529 	train: 95.358% 	valid: 95.44% 	test: 95.01%
saving filters...
61 : cost: 0.168623 	train: 95.422% 	valid: 95.5% 	test: 95.02%
62 : cost: 0.166749 	train: 95.47% 	valid: 95.54% 	test: 95.05%
63 : cost: 0.164906 	train: 95.518% 	valid: 95.62% 	test: 95.07%
saving filters...
64 : cost: 0.163095 	train: 95.568% 	valid: 95.67% 	test: 95.13%
65 : cost: 0.161314 	train: 95.612% 	valid: 95.71% 	test: 95.19%
66 : cost: 0.159564 	train: 95.666% 	valid: 95.72% 	test: 95.21%
saving filters...
67 : cost: 0.157843 	train: 95.706% 	valid: 95.76% 	test: 95.27%
68 : cost: 0.156151 	train: 95.756% 	valid: 95.83% 	test: 95.31%
69 : cost: 0.154488 	train: 95.806% 	valid: 95.87% 	test: 95.31%
saving filters...
70 : cost: 0.152853 	train: 95.854% 	valid: 95.88% 	test: 95.36%
71 : cost: 0.151246 	train: 95.904% 	valid: 95.94% 	test: 95.38%
72 : cost: 0.149666 	train: 95.964% 	valid: 95.96% 	test: 95.46%
saving filters...
73 : cost: 0.148112 	train: 96.016% 	valid: 95.99% 	test: 95.5%
74 : cost: 0.146585 	train: 96.072% 	valid: 96.01% 	test: 95.56%
75 : cost: 0.145083 	train: 96.138% 	valid: 96.05% 	test: 95.6%
saving filters...
76 : cost: 0.143606 	train: 96.18% 	valid: 96.09% 	test: 95.64%
77 : cost: 0.142153 	train: 96.224% 	valid: 96.1% 	test: 95.68%
78 : cost: 0.140724 	train: 96.254% 	valid: 96.12% 	test: 95.74%
saving filters...
79 : cost: 0.139319 	train: 96.306% 	valid: 96.14% 	test: 95.77%
80 : cost: 0.137937 	train: 96.348% 	valid: 96.15% 	test: 95.77%
81 : cost: 0.136577 	train: 96.384% 	valid: 96.16% 	test: 95.81%
saving filters...
82 : cost: 0.13524 	train: 96.42% 	valid: 96.22% 	test: 95.86%
83 : cost: 0.133923 	train: 96.468% 	valid: 96.24% 	test: 95.87%
84 : cost: 0.132628 	train: 96.502% 	valid: 96.3% 	test: 95.93%
saving filters...
85 : cost: 0.131354 	train: 96.544% 	valid: 96.33% 	test: 95.96%
86 : cost: 0.130099 	train: 96.58% 	valid: 96.36% 	test: 96.03%
87 : cost: 0.128864 	train: 96.608% 	valid: 96.38% 	test: 96.07%
saving filters...
88 : cost: 0.127649 	train: 96.658% 	valid: 96.42% 	test: 96.09%
89 : cost: 0.126453 	train: 96.688% 	valid: 96.43% 	test: 96.1%
90 : cost: 0.125275 	train: 96.71% 	valid: 96.44% 	test: 96.14%
saving filters...
91 : cost: 0.124115 	train: 96.742% 	valid: 96.44% 	test: 96.16%
92 : cost: 0.122972 	train: 96.79% 	valid: 96.5% 	test: 96.18%
93 : cost: 0.121848 	train: 96.822% 	valid: 96.51% 	test: 96.18%
saving filters...
94 : cost: 0.120739 	train: 96.854% 	valid: 96.52% 	test: 96.2%
95 : cost: 0.119648 	train: 96.892% 	valid: 96.53% 	test: 96.22%
96 : cost: 0.118573 	train: 96.93% 	valid: 96.56% 	test: 96.26%
saving filters...
97 : cost: 0.117514 	train: 96.954% 	valid: 96.58% 	test: 96.29%
98 : cost: 0.11647 	train: 96.97% 	valid: 96.58% 	test: 96.3%
99 : cost: 0.115442 	train: 96.994% 	valid: 96.6% 	test: 96.33%
saving filters...
100 : cost: 0.114428 	train: 97.022% 	valid: 96.61% 	test: 96.33%
101 : cost: 0.113429 	train: 97.062% 	valid: 96.62% 	test: 96.33%
102 : cost: 0.112444 	train: 97.092% 	valid: 96.61% 	test: 96.36%
saving filters...
103 : cost: 0.111474 	train: 97.136% 	valid: 96.64% 	test: 96.37%
104 : cost: 0.110517 	train: 97.154% 	valid: 96.69% 	test: 96.39%
105 : cost: 0.109573 	train: 97.18% 	valid: 96.72% 	test: 96.4%
saving filters...
106 : cost: 0.108643 	train: 97.208% 	valid: 96.73% 	test: 96.44%
107 : cost: 0.107725 	train: 97.246% 	valid: 96.74% 	test: 96.45%
108 : cost: 0.10682 	train: 97.27% 	valid: 96.74% 	test: 96.48%
saving filters...
109 : cost: 0.105928 	train: 97.288% 	valid: 96.76% 	test: 96.5%
110 : cost: 0.105048 	train: 97.322% 	valid: 96.78% 	test: 96.53%
111 : cost: 0.104179 	train: 97.338% 	valid: 96.8% 	test: 96.56%
saving filters...
112 : cost: 0.103322 	train: 97.364% 	valid: 96.8% 	test: 96.59%
113 : cost: 0.102477 	train: 97.386% 	valid: 96.8% 	test: 96.63%
114 : cost: 0.101643 	train: 97.41% 	valid: 96.81% 	test: 96.64%
saving filters...
115 : cost: 0.10082 	train: 97.438% 	valid: 96.82% 	test: 96.65%
116 : cost: 0.100007 	train: 97.46% 	valid: 96.84% 	test: 96.66%
117 : cost: 0.0992054 	train: 97.476% 	valid: 96.85% 	test: 96.67%
saving filters...
118 : cost: 0.0984139 	train: 97.498% 	valid: 96.87% 	test: 96.68%
119 : cost: 0.0976325 	train: 97.532% 	valid: 96.89% 	test: 96.68%
120 : cost: 0.0968612 	train: 97.546% 	valid: 96.89% 	test: 96.69%
saving filters...
121 : cost: 0.0960996 	train: 97.558% 	valid: 96.9% 	test: 96.7%
122 : cost: 0.0953476 	train: 97.592% 	valid: 96.91% 	test: 96.7%
123 : cost: 0.094605 	train: 97.612% 	valid: 96.92% 	test: 96.72%
saving filters...
124 : cost: 0.0938718 	train: 97.63% 	valid: 96.94% 	test: 96.74%
125 : cost: 0.0931474 	train: 97.652% 	valid: 96.95% 	test: 96.74%
126 : cost: 0.0924321 	train: 97.668% 	valid: 96.98% 	test: 96.74%
saving filters...
127 : cost: 0.0917255 	train: 97.684% 	valid: 96.99% 	test: 96.74%
128 : cost: 0.0910275 	train: 97.706% 	valid: 97.0% 	test: 96.77%
129 : cost: 0.0903379 	train: 97.724% 	valid: 97.01% 	test: 96.79%
saving filters...
130 : cost: 0.0896565 	train: 97.736% 	valid: 97.02% 	test: 96.79%
131 : cost: 0.0889833 	train: 97.756% 	valid: 97.02% 	test: 96.79%
132 : cost: 0.0883181 	train: 97.772% 	valid: 97.03% 	test: 96.8%
saving filters...
133 : cost: 0.0876607 	train: 97.786% 	valid: 97.07% 	test: 96.8%
134 : cost: 0.087011 	train: 97.804% 	valid: 97.08% 	test: 96.81%
135 : cost: 0.0863689 	train: 97.812% 	valid: 97.09% 	test: 96.82%
saving filters...
136 : cost: 0.0857343 	train: 97.824% 	valid: 97.12% 	test: 96.83%
137 : cost: 0.085107 	train: 97.842% 	valid: 97.12% 	test: 96.86%
138 : cost: 0.0844868 	train: 97.86% 	valid: 97.13% 	test: 96.88%
saving filters...
139 : cost: 0.0838737 	train: 97.884% 	valid: 97.12% 	test: 96.9%
140 : cost: 0.0832676 	train: 97.898% 	valid: 97.11% 	test: 96.91%
141 : cost: 0.0826682 	train: 97.906% 	valid: 97.11% 	test: 96.94%
saving filters...
142 : cost: 0.0820756 	train: 97.916% 	valid: 97.12% 	test: 96.93%
143 : cost: 0.0814896 	train: 97.93% 	valid: 97.14% 	test: 96.93%
144 : cost: 0.0809101 	train: 97.948% 	valid: 97.15% 	test: 96.96%
saving filters...
145 : cost: 0.080337 	train: 97.966% 	valid: 97.15% 	test: 96.97%
146 : cost: 0.0797701 	train: 97.99% 	valid: 97.16% 	test: 96.97%
147 : cost: 0.0792095 	train: 98.012% 	valid: 97.18% 	test: 96.98%
saving filters...
148 : cost: 0.0786549 	train: 98.03% 	valid: 97.19% 	test: 96.98%
149 : cost: 0.0781063 	train: 98.058% 	valid: 97.21% 	test: 97.0%
150 : cost: 0.0775635 	train: 98.074% 	valid: 97.21% 	test: 97.01%
saving filters...
151 : cost: 0.0770266 	train: 98.082% 	valid: 97.21% 	test: 97.02%
152 : cost: 0.0764954 	train: 98.096% 	valid: 97.21% 	test: 97.02%
153 : cost: 0.0759698 	train: 98.12% 	valid: 97.22% 	test: 97.02%
saving filters...
154 : cost: 0.0754497 	train: 98.134% 	valid: 97.22% 	test: 97.03%
155 : cost: 0.0749351 	train: 98.144% 	valid: 97.22% 	test: 97.04%
156 : cost: 0.0744257 	train: 98.166% 	valid: 97.24% 	test: 97.06%
saving filters...
157 : cost: 0.0739218 	train: 98.184% 	valid: 97.24% 	test: 97.06%
158 : cost: 0.0734229 	train: 98.208% 	valid: 97.27% 	test: 97.07%
159 : cost: 0.0729292 	train: 98.224% 	valid: 97.28% 	test: 97.07%
saving filters...
160 : cost: 0.0724405 	train: 98.234% 	valid: 97.29% 	test: 97.08%
161 : cost: 0.0719567 	train: 98.246% 	valid: 97.3% 	test: 97.1%
162 : cost: 0.0714778 	train: 98.262% 	valid: 97.29% 	test: 97.12%
saving filters...
163 : cost: 0.0710037 	train: 98.27% 	valid: 97.3% 	test: 97.12%
164 : cost: 0.0705344 	train: 98.284% 	valid: 97.32% 	test: 97.13%
165 : cost: 0.0700698 	train: 98.292% 	valid: 97.31% 	test: 97.13%
saving filters...
166 : cost: 0.0696097 	train: 98.306% 	valid: 97.3% 	test: 97.14%
167 : cost: 0.0691542 	train: 98.314% 	valid: 97.29% 	test: 97.15%
168 : cost: 0.0687031 	train: 98.328% 	valid: 97.31% 	test: 97.16%
saving filters...
169 : cost: 0.0682565 	train: 98.344% 	valid: 97.32% 	test: 97.16%
170 : cost: 0.0678142 	train: 98.344% 	valid: 97.33% 	test: 97.17%
171 : cost: 0.0673761 	train: 98.348% 	valid: 97.34% 	test: 97.17%
saving filters...
172 : cost: 0.0669422 	train: 98.366% 	valid: 97.34% 	test: 97.17%
173 : cost: 0.0665125 	train: 98.382% 	valid: 97.33% 	test: 97.17%
174 : cost: 0.0660869 	train: 98.39% 	valid: 97.33% 	test: 97.18%
saving filters...
175 : cost: 0.0656654 	train: 98.406% 	valid: 97.33% 	test: 97.18%
176 : cost: 0.0652478 	train: 98.418% 	valid: 97.33% 	test: 97.16%
177 : cost: 0.0648342 	train: 98.43% 	valid: 97.33% 	test: 97.17%
saving filters...
178 : cost: 0.0644244 	train: 98.442% 	valid: 97.35% 	test: 97.17%
179 : cost: 0.0640184 	train: 98.458% 	valid: 97.35% 	test: 97.17%
180 : cost: 0.0636162 	train: 98.466% 	valid: 97.34% 	test: 97.18%
saving filters...
181 : cost: 0.0632178 	train: 98.48% 	valid: 97.34% 	test: 97.18%
182 : cost: 0.0628229 	train: 98.494% 	valid: 97.37% 	test: 97.19%
183 : cost: 0.0624317 	train: 98.508% 	valid: 97.37% 	test: 97.2%
saving filters...
184 : cost: 0.0620441 	train: 98.53% 	valid: 97.37% 	test: 97.21%
185 : cost: 0.06166 	train: 98.538% 	valid: 97.37% 	test: 97.23%
186 : cost: 0.0612793 	train: 98.55% 	valid: 97.36% 	test: 97.24%
saving filters...
187 : cost: 0.0609021 	train: 98.568% 	valid: 97.36% 	test: 97.24%
188 : cost: 0.0605283 	train: 98.582% 	valid: 97.36% 	test: 97.26%
189 : cost: 0.0601578 	train: 98.6% 	valid: 97.37% 	test: 97.27%
saving filters...
190 : cost: 0.0597905 	train: 98.606% 	valid: 97.37% 	test: 97.28%
191 : cost: 0.0594266 	train: 98.62% 	valid: 97.38% 	test: 97.29%
192 : cost: 0.0590659 	train: 98.624% 	valid: 97.38% 	test: 97.3%
saving filters...
193 : cost: 0.0587082 	train: 98.628% 	valid: 97.38% 	test: 97.3%
194 : cost: 0.0583538 	train: 98.632% 	valid: 97.38% 	test: 97.3%
195 : cost: 0.0580024 	train: 98.634% 	valid: 97.39% 	test: 97.31%
saving filters...
196 : cost: 0.057654 	train: 98.64% 	valid: 97.38% 	test: 97.3%
197 : cost: 0.0573087 	train: 98.644% 	valid: 97.39% 	test: 97.31%
198 : cost: 0.0569663 	train: 98.658% 	valid: 97.38% 	test: 97.33%
saving filters...
199 : cost: 0.0566269 	train: 98.666% 	valid: 97.38% 	test: 97.33%
200 : cost: 0.0562904 	train: 98.674% 	valid: 97.39% 	test: 97.34%
201 : cost: 0.0559567 	train: 98.688% 	valid: 97.39% 	test: 97.34%
saving filters...
202 : cost: 0.0556259 	train: 98.704% 	valid: 97.39% 	test: 97.35%
203 : cost: 0.0552979 	train: 98.72% 	valid: 97.4% 	test: 97.35%
204 : cost: 0.0549725 	train: 98.738% 	valid: 97.41% 	test: 97.36%
saving filters...
205 : cost: 0.05465 	train: 98.742% 	valid: 97.41% 	test: 97.36%
206 : cost: 0.0543301 	train: 98.744% 	valid: 97.41% 	test: 97.37%
207 : cost: 0.0540128 	train: 98.756% 	valid: 97.41% 	test: 97.37%
saving filters...
208 : cost: 0.0536982 	train: 98.762% 	valid: 97.42% 	test: 97.38%
209 : cost: 0.0533862 	train: 98.768% 	valid: 97.43% 	test: 97.4%
210 : cost: 0.0530767 	train: 98.778% 	valid: 97.44% 	test: 97.41%
saving filters...
211 : cost: 0.0527698 	train: 98.786% 	valid: 97.46% 	test: 97.41%
212 : cost: 0.0524655 	train: 98.792% 	valid: 97.46% 	test: 97.41%
213 : cost: 0.0521635 	train: 98.804% 	valid: 97.46% 	test: 97.41%
saving filters...
214 : cost: 0.051864 	train: 98.814% 	valid: 97.46% 	test: 97.41%
215 : cost: 0.0515669 	train: 98.816% 	valid: 97.46% 	test: 97.41%
216 : cost: 0.0512722 	train: 98.824% 	valid: 97.46% 	test: 97.41%
saving filters...
217 : cost: 0.0509799 	train: 98.838% 	valid: 97.46% 	test: 97.4%
218 : cost: 0.0506899 	train: 98.842% 	valid: 97.45% 	test: 97.4%
219 : cost: 0.0504022 	train: 98.852% 	valid: 97.46% 	test: 97.41%
saving filters...
220 : cost: 0.0501168 	train: 98.86% 	valid: 97.46% 	test: 97.43%
221 : cost: 0.0498336 	train: 98.876% 	valid: 97.47% 	test: 97.43%
222 : cost: 0.0495527 	train: 98.888% 	valid: 97.48% 	test: 97.43%
saving filters...
223 : cost: 0.0492739 	train: 98.894% 	valid: 97.49% 	test: 97.44%
224 : cost: 0.0489974 	train: 98.904% 	valid: 97.5% 	test: 97.44%
225 : cost: 0.048723 	train: 98.914% 	valid: 97.51% 	test: 97.44%
saving filters...
226 : cost: 0.0484507 	train: 98.926% 	valid: 97.52% 	test: 97.45%
227 : cost: 0.0481805 	train: 98.942% 	valid: 97.52% 	test: 97.45%
228 : cost: 0.0479125 	train: 98.946% 	valid: 97.53% 	test: 97.45%
saving filters...
229 : cost: 0.0476465 	train: 98.956% 	valid: 97.54% 	test: 97.47%
230 : cost: 0.0473825 	train: 98.97% 	valid: 97.54% 	test: 97.47%
231 : cost: 0.0471206 	train: 98.98% 	valid: 97.54% 	test: 97.48%
saving filters...
232 : cost: 0.0468606 	train: 98.99% 	valid: 97.54% 	test: 97.5%
233 : cost: 0.0466026 	train: 98.996% 	valid: 97.54% 	test: 97.5%
234 : cost: 0.0463466 	train: 99.004% 	valid: 97.54% 	test: 97.51%
saving filters...
235 : cost: 0.0460925 	train: 99.014% 	valid: 97.53% 	test: 97.51%
236 : cost: 0.0458403 	train: 99.02% 	valid: 97.52% 	test: 97.52%
237 : cost: 0.0455901 	train: 99.026% 	valid: 97.52% 	test: 97.52%
saving filters...
238 : cost: 0.0453416 	train: 99.032% 	valid: 97.52% 	test: 97.52%
239 : cost: 0.0450951 	train: 99.04% 	valid: 97.52% 	test: 97.52%
240 : cost: 0.0448504 	train: 99.048% 	valid: 97.52% 	test: 97.52%
saving filters...
241 : cost: 0.0446075 	train: 99.06% 	valid: 97.52% 	test: 97.52%
242 : cost: 0.0443663 	train: 99.064% 	valid: 97.52% 	test: 97.53%
243 : cost: 0.044127 	train: 99.066% 	valid: 97.52% 	test: 97.54%
saving filters...
244 : cost: 0.0438895 	train: 99.072% 	valid: 97.53% 	test: 97.54%
245 : cost: 0.0436537 	train: 99.082% 	valid: 97.54% 	test: 97.54%
246 : cost: 0.0434195 	train: 99.088% 	valid: 97.53% 	test: 97.54%
saving filters...
247 : cost: 0.0431871 	train: 99.094% 	valid: 97.54% 	test: 97.54%
248 : cost: 0.0429565 	train: 99.1% 	valid: 97.55% 	test: 97.54%
249 : cost: 0.0427274 	train: 99.108% 	valid: 97.55% 	test: 97.54%
saving filters...
250 : cost: 0.0425001 	train: 99.11% 	valid: 97.56% 	test: 97.55%
251 : cost: 0.0422743 	train: 99.112% 	valid: 97.56% 	test: 97.55%
252 : cost: 0.0420502 	train: 99.114% 	valid: 97.56% 	test: 97.56%
saving filters...
253 : cost: 0.0418277 	train: 99.122% 	valid: 97.56% 	test: 97.57%
254 : cost: 0.0416068 	train: 99.132% 	valid: 97.56% 	test: 97.57%
255 : cost: 0.0413875 	train: 99.138% 	valid: 97.58% 	test: 97.58%
saving filters...
256 : cost: 0.0411697 	train: 99.146% 	valid: 97.59% 	test: 97.58%
257 : cost: 0.0409535 	train: 99.154% 	valid: 97.59% 	test: 97.61%
258 : cost: 0.0407388 	train: 99.162% 	valid: 97.6% 	test: 97.61%
saving filters...
259 : cost: 0.0405257 	train: 99.166% 	valid: 97.6% 	test: 97.6%
260 : cost: 0.040314 	train: 99.17% 	valid: 97.6% 	test: 97.6%
261 : cost: 0.0401038 	train: 99.174% 	valid: 97.61% 	test: 97.6%
saving filters...
262 : cost: 0.0398951 	train: 99.184% 	valid: 97.61% 	test: 97.6%
263 : cost: 0.0396879 	train: 99.19% 	valid: 97.61% 	test: 97.6%
264 : cost: 0.0394821 	train: 99.194% 	valid: 97.61% 	test: 97.6%
saving filters...
265 : cost: 0.0392777 	train: 99.204% 	valid: 97.62% 	test: 97.6%
266 : cost: 0.0390748 	train: 99.206% 	valid: 97.62% 	test: 97.61%
267 : cost: 0.0388732 	train: 99.214% 	valid: 97.62% 	test: 97.61%
saving filters...
268 : cost: 0.0386731 	train: 99.22% 	valid: 97.62% 	test: 97.61%
269 : cost: 0.0384743 	train: 99.23% 	valid: 97.64% 	test: 97.61%
270 : cost: 0.0382769 	train: 99.248% 	valid: 97.64% 	test: 97.61%
saving filters...
271 : cost: 0.0380808 	train: 99.25% 	valid: 97.64% 	test: 97.61%
272 : cost: 0.0378862 	train: 99.252% 	valid: 97.63% 	test: 97.62%
273 : cost: 0.0376928 	train: 99.272% 	valid: 97.63% 	test: 97.62%
saving filters...
274 : cost: 0.0375007 	train: 99.274% 	valid: 97.64% 	test: 97.63%
275 : cost: 0.03731 	train: 99.278% 	valid: 97.64% 	test: 97.63%
276 : cost: 0.0371205 	train: 99.28% 	valid: 97.64% 	test: 97.63%
saving filters...
277 : cost: 0.0369324 	train: 99.29% 	valid: 97.65% 	test: 97.63%
278 : cost: 0.0367454 	train: 99.294% 	valid: 97.65% 	test: 97.63%
279 : cost: 0.0365598 	train: 99.3% 	valid: 97.65% 	test: 97.64%
saving filters...
280 : cost: 0.0363754 	train: 99.306% 	valid: 97.65% 	test: 97.64%
281 : cost: 0.0361922 	train: 99.31% 	valid: 97.64% 	test: 97.65%
282 : cost: 0.0360103 	train: 99.316% 	valid: 97.65% 	test: 97.65%
saving filters...
283 : cost: 0.0358296 	train: 99.32% 	valid: 97.65% 	test: 97.65%
284 : cost: 0.0356501 	train: 99.322% 	valid: 97.65% 	test: 97.65%
285 : cost: 0.0354718 	train: 99.324% 	valid: 97.65% 	test: 97.65%
saving filters...
286 : cost: 0.0352947 	train: 99.334% 	valid: 97.65% 	test: 97.66%
287 : cost: 0.0351187 	train: 99.334% 	valid: 97.66% 	test: 97.66%
288 : cost: 0.0349439 	train: 99.338% 	valid: 97.66% 	test: 97.66%
saving filters...
289 : cost: 0.0347703 	train: 99.34% 	valid: 97.66% 	test: 97.68%
290 : cost: 0.0345977 	train: 99.342% 	valid: 97.66% 	test: 97.69%
291 : cost: 0.0344264 	train: 99.346% 	valid: 97.66% 	test: 97.69%
saving filters...
292 : cost: 0.0342562 	train: 99.356% 	valid: 97.66% 	test: 97.69%
293 : cost: 0.0340871 	train: 99.358% 	valid: 97.66% 	test: 97.69%
294 : cost: 0.033919 	train: 99.362% 	valid: 97.66% 	test: 97.72%
saving filters...
295 : cost: 0.0337521 	train: 99.368% 	valid: 97.67% 	test: 97.72%
296 : cost: 0.0335863 	train: 99.37% 	valid: 97.67% 	test: 97.73%
297 : cost: 0.0334215 	train: 99.378% 	valid: 97.67% 	test: 97.73%
saving filters...
298 : cost: 0.0332579 	train: 99.388% 	valid: 97.67% 	test: 97.74%
299 : cost: 0.0330952 	train: 99.39% 	valid: 97.67% 	test: 97.74%
300 : cost: 0.0329337 	train: 99.392% 	valid: 97.68% 	test: 97.75%
saving filters...

In [ ]: