In [10]:
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
from IPython.html.widgets import interact
import sknn.mlp as neurnet
from IPython.display import Image

from sklearn.datasets import load_digits
digits = load_digits()

trainingdata = digits.data[0:1200]
traininganswers = digits.target[0:1200]
lc = 0.02  # learning rate; note the cells below hard-code 0.02 rather than reusing lc

# convert each integer label into a 10-dimensional one-hot vector
traininganswervectors = np.zeros((1200, 10))
for n in range(1200):
    traininganswervectors[n][traininganswers[n]] = 1
    
testdata = digits.data[1200:1700]
testanswers = digits.target[1200:1700]

testanswervectors = np.zeros((500, 10))
for n in range(500):
    testanswervectors[n][testanswers[n]] = 1
    
    
# zip() returns an iterator in Python 3, so materialize it with list()
# before handing it to np.array (these pairings are not used below)
trainingtuples = np.array(list(zip(trainingdata, traininganswervectors)))
testtuples = np.array(list(zip(testdata, testanswervectors)))
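
Aside: the two one-hot loops above can each be written in a single step with NumPy fancy indexing; a minimal equivalent using only names already defined:

In [ ]:
# Row i of np.eye(10) is the one-hot vector for digit i, so indexing the
# identity matrix by the label array one-hot encodes it in one shot.
traininganswervectors = np.eye(10)[digits.target[:1200]]
testanswervectors = np.eye(10)[digits.target[1200:1700]]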

In [11]:
def accuracy(inputs, results, answers):
    # `inputs` is unused but kept so the call sites below stay unchanged.
    correct = 0
    # Binarize each output row: 1 at its largest entry, 0 elsewhere. Use a
    # fresh array -- the original aliased `results` and overwrote the raw
    # network outputs while binarizing in place.
    binresults = np.zeros_like(results)
    for n in range(len(results)):
        binresults[n][np.argmax(results[n])] = 1
        if np.array_equal(answers[n], binresults[n]):
            correct += 1
    return correct / len(results)
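
For reference, the same computation vectorizes to a one-liner; `accuracy_fast` is a name of my choosing and is not used in the cells below:

In [ ]:
# Argmax of a one-hot vector recovers the digit, so comparing argmaxes of
# predictions and answers counts exact matches directly.
def accuracy_fast(results, answers):
    return np.mean(np.argmax(results, axis=1) == np.argmax(answers, axis=1))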

In [12]:
Image(url="http://upload.wikimedia.org/wikipedia/commons/thumb/6/6f/Gjl-t%28x%29.svg/700px-Gjl-t%28x%29.svg.png")


Out[12]:
[image: plot of a sigmoid-shaped activation curve, from Wikimedia Commons]
In [41]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Sigmoid", units=64),
        neurnet.Layer("Sigmoid", units=80),
        neurnet.Layer("Sigmoid", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


1 loops, best of 3: 9.57 s per loop
0.892
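
The one-hot targets above amount to doing classification by regression. If memory serves, sknn also provides a `Classifier` that takes the integer labels directly and handles the encoding and argmax itself; a hedged sketch along those lines (not run here, and the exact shape returned by `predict` may differ):

In [ ]:
# Hedged alternative: same data, but sknn handles label encoding internally.
clf = neurnet.Classifier(
    layers=[
        neurnet.Layer("Sigmoid", units=80),
        neurnet.Layer("Softmax")],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)
clf.fit(trainingdata, traininganswers)
print(np.mean(clf.predict(testdata).flatten() == testanswers))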

In [54]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Tanh", units=64),
        neurnet.Layer("Tanh", units=80),
        neurnet.Layer("Tanh", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


ERROR:sknn:
A runtime exception was caught during training. This likely occurred due to
a divergence of the SGD algorithm, and NaN floats were found by PyLearn2.

Try setting the `learning_rate` 10x lower to resolve this, for example:
    learning_rate=0.002000

---------------------------------------------------------------------------
NotImplementedError                       Traceback (most recent call last)
...
/home/localadmin/src/pylearn2/pylearn2/models/mlp.py in cost(self, *args, **kwargs)
   2247     def cost(self, *args, **kwargs):
   2248 
-> 2249         raise NotImplementedError()
   2250 

NotImplementedError: 
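
The NaN hint from sknn is a red herring here: the traceback bottoms out in pylearn2's generic `mlp.Layer.cost`, meaning the Tanh layer defines no cost function and so cannot serve as the *output* layer of a `Regressor`. As hidden activations, Tanh (like Rectifier and Maxout below, which fail the same way) is fine; only the output layer needs a cost, and the Sigmoid, Linear, Gaussian, and Softmax runs show those types have one (the Linear and Gaussian runs do reach training, then diverge to NaN weights at this learning rate). A hedged, untested variant keeping Tanh hidden layers behind a Linear output:

In [ ]:
# Assumption: only the last layer's type decides whether a cost exists.
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Tanh", units=64),
        neurnet.Layer("Tanh", units=80),
        neurnet.Layer("Linear", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)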

In [45]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Linear", units=64),
        neurnet.Layer("Linear", units=80),
        neurnet.Layer("Linear", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


ERROR:sknn:
A runtime exception was caught during training. This likely occurred due to
a divergence of the SGD algorithm, and NaN floats were found by PyLearn2.

Try setting the `learning_rate` 10x lower to resolve this, for example:
    learning_rate=0.002000

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
...
/home/localadmin/src/pylearn2/pylearn2/training_algorithms/sgd.py in train(self, dataset)
    468             value = param.get_value(borrow=True)
    469             if not isfinite(value):
--> 470                 raise RuntimeError("NaN in " + param.name)
    471 

RuntimeError: NaN in hidden0_W

In [46]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Rectifier", units=64),
        neurnet.Layer("Rectifier", units=80),
        neurnet.Layer("Rectifier", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


ERROR:sknn:
A runtime exception was caught during training. This likely occurred due to
a divergence of the SGD algorithm, and NaN floats were found by PyLearn2.

Try setting the `learning_rate` 10x lower to resolve this, for example:
    learning_rate=0.002000

---------------------------------------------------------------------------
NotImplementedError                       Traceback (most recent call last)
...
/home/localadmin/src/pylearn2/pylearn2/models/mlp.py in cost(self, *args, **kwargs)
   2516     def cost(self, *args, **kwargs):
   2517 
-> 2518         raise NotImplementedError()
   2519 

NotImplementedError: 

In [47]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Maxout", units=64, pieces=5),
        neurnet.Layer("Maxout", units=80, pieces=5),
        neurnet.Layer("Maxout", units=10, pieces=5)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


ERROR:sknn:
A runtime exception was caught during training. This likely occurred due to
a divergence of the SGD algorithm, and NaN floats were found by PyLearn2.

Try setting the `learning_rate` 10x lower to resolve this, for example:
    learning_rate=0.002000

---------------------------------------------------------------------------
NotImplementedError                       Traceback (most recent call last)
...
/home/localadmin/src/pylearn2/pylearn2/models/mlp.py in cost(self, Y, Y_hat)
    211 
    212         raise NotImplementedError(
--> 213             str(type(self)) + " does not implement mlp.Layer.cost.")
    214 

NotImplementedError: <class 'pylearn2.models.maxout.Maxout'> does not implement mlp.Layer.cost.

In [48]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Gaussian", units=64),
        neurnet.Layer("Gaussian", units=80),
        neurnet.Layer("Gaussian", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


ERROR:sknn:
A runtime exception was caught during training. This likely occurred due to
a divergence of the SGD algorithm, and NaN floats were found by PyLearn2.

Try setting the `learning_rate` 10x lower to resolve this, for example:
    learning_rate=0.002000

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
...
/home/localadmin/src/pylearn2/pylearn2/training_algorithms/sgd.py in train(self, dataset)
    468             value = param.get_value(borrow=True)
    469             if not isfinite(value):
--> 470                 raise RuntimeError("NaN in " + param.name)
    471 

RuntimeError: NaN in hidden0_W

In [49]:
nn = neurnet.Regressor(
    layers=[
        neurnet.Layer("Softmax", units=64),
        neurnet.Layer("Softmax", units=80),
        neurnet.Layer("Softmax", units=10)],
    learning_rate=0.02,
    n_iter=100,
    batch_size=5)

%timeit -n 1 nn.fit(trainingdata, traininganswervectors)

y_valid = nn.predict(testdata)
print(accuracy(testdata, y_valid, testanswervectors))


1 loops, best of 3: 9.49 s per loop
0.458

In [ ]:
# Fit time (s) and test accuracy for the two networks that trained to
# completion: all-Sigmoid and all-Softmax.
time = [9.57, 9.49]
acc = [0.892, 0.458]
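
To wrap up, the lists above could feed a quick comparison chart; a minimal sketch using only matplotlib and the two lists (the "Sigmoid"/"Softmax" labels are mine, matching the two runs that finished):

In [ ]:
labels = ["Sigmoid", "Softmax"]
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 3))
ax1.bar(range(len(time)), time)        # fit time per network
ax1.set_xticks(range(len(labels)))
ax1.set_xticklabels(labels)
ax1.set_ylabel("fit time (s)")
ax2.bar(range(len(acc)), acc)          # test-set accuracy per network
ax2.set_xticks(range(len(labels)))
ax2.set_xticklabels(labels)
ax2.set_ylabel("test accuracy")
plt.tight_layout()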