In [2]:
import keras
import numpy as np
import theano
print keras.__version__
print np.__version__
print theano.__version__


0.3.0
1.8.2
0.7.0.dev-e521b20e578c033d51e548181bd1edd24af64427

In [3]:
import keras
import theano
print keras.__version__
print np.__version__
print theano.__version__


0.3.0
1.8.2
0.7.0.dev-e521b20e578c033d51e548181bd1edd24af64427

In [25]:
import keras
import theano
print keras.__version__
print np.__version__
print theano.__version__


0.3.0
1.10.4
0.7.0.dev-e521b20e578c033d51e548181bd1edd24af64427

In [1]:
%pylab inline

from pylab import rcParams
rcParams['figure.figsize'] = 15, 5


Populating the interactive namespace from numpy and matplotlib

In [2]:
import numpy as np

In [3]:
import random

In [4]:
def one_every_n(length, n):
    """Return a boolean array of shape (length,) that is True at every n-th
    position, with a uniformly random phase offset.

    Bug fix: the original used random.randint(0, n), which is INCLUSIVE of n.
    With probability 1/(n+1) the offset equaled n, and since
    np.remainder(x, n) is always < n, the result was an all-False array
    (a sequence with no spikes at all).  random.randrange(n) draws from
    0..n-1, which is what the remainder comparison requires.
    """
    i = random.randrange(n)
    x = np.arange(length)
    return np.remainder(x, n) == i

In [5]:
plot(one_every_n(100, 10))


Out[5]:
[<matplotlib.lines.Line2D at 0x7f746e210bd0>]

In [6]:
def batch(batch_size, fn, *args):
    """Generate a (inputs, targets) training pair for next-step prediction.

    Calls fn(*args) batch_size times, stacks the results, and returns the
    sequences shifted by one timestep: inputs are steps 0..T-2, targets are
    steps 1..T-1, each with a trailing feature axis of size 1.
    Shapes: (batch_size, T-1, 1) for both outputs.
    """
    samples = []
    for _ in range(batch_size):
        samples.append(fn(*args))
    stacked = np.array(samples)
    inputs = stacked[:, :-1, None]
    targets = stacked[:, 1:, None]
    return inputs, targets

In [7]:
batch(10, one_every_n, 10, 2)[0].shape


Out[7]:
(10, 9, 1)

In [8]:
import random

In [9]:
from keras.models import Sequential
from keras.layers.recurrent import LSTM, GRU, SimpleRNN
from keras.optimizers import SGD
from keras.layers.normalization import BatchNormalization
from keras.layers.core import TimeDistributedDense


Using gpu device 0: GRID K520 (CNMeM is disabled, CuDNN 3007)
/usr/local/lib/python2.7/dist-packages/theano/tensor/signal/downsample.py:5: UserWarning: downsample module has been moved to the pool module.
  warnings.warn("downsample module has been moved to the pool module.")
Using Theano backend.

In [10]:
def random_sample():
    """Draw one random hyperparameter configuration for an experiment run.

    Keys match what build_model(**config) expects; 'every_n' parameterizes
    the training-data generator instead.
    """
    config = {}
    config['learning_rate'] = random.uniform(0.1, 1)
    config['recurrent_layer'] = random.choice([LSTM, GRU, SimpleRNN])
    config['activation'] = random.choice(['sigmoid', 'tanh'])
    config['every_n'] = random.randint(2, 20)
    config['n_hidden'] = random.randint(1, 20)
    config['n_layers'] = random.randint(2, 4)
    config['tdd_in'] = random.choice([True, False])
    config['tdd_out'] = random.choice([True, False])
    return config

In [11]:
def build_model(learning_rate=None, recurrent_layer=None, n_hidden=None, n_layers=None, tdd_in=None, tdd_out=None, activation=None, **kwargs):
    """Assemble and compile a sequence-to-sequence Keras model.

    Parameters
    ----------
    learning_rate : float
        Accepted so sampled parameter dicts can be passed as **params, but
        currently UNUSED: the model compiles with the 'rmsprop' optimizer at
        its default settings (the SGD wiring was removed as dead code).
    recurrent_layer : class
        A Keras recurrent layer class (LSTM, GRU, or SimpleRNN).
    n_hidden : int
        Width of the input and middle layers.
    n_layers : int
        Total layer count; n_layers - 2 middle layers are added, so with
        n_layers == 2 the model is just input layer + output layer.
    tdd_in, tdd_out : bool
        If True, use a TimeDistributedDense instead of a recurrent layer at
        the input / output position respectively.
    activation : str
        Activation for the recurrent layers ('sigmoid' or 'tanh').
    **kwargs
        Ignored; lets extra keys (e.g. 'every_n') pass through harmlessly.

    Returns
    -------
    A compiled keras.models.Sequential mapping (T, 1) -> (T, 1) sequences
    with mean-squared-error loss.
    """
    model = Sequential()

    # Input layer: 1 feature per timestep -> n_hidden units.
    if tdd_in:
        model.add(TimeDistributedDense(input_dim=1, output_dim=n_hidden))
    else:
        model.add(recurrent_layer(input_dim=1, output_dim=n_hidden, activation=activation, return_sequences=True))

    # Middle recurrent layers (none when n_layers == 2).
    for _ in range(n_layers - 2):
        model.add(recurrent_layer(input_dim=n_hidden, output_dim=n_hidden, activation=activation, return_sequences=True))

    # Output layer: project back to 1 value per timestep.
    if tdd_out:
        model.add(TimeDistributedDense(input_dim=n_hidden, output_dim=1))
    else:
        model.add(recurrent_layer(input_dim=n_hidden, output_dim=1, activation=activation, return_sequences=True))

    model.compile(loss='mean_squared_error', optimizer='rmsprop')

    return model

In [12]:
# Training-data dimensions: number of sequences generated per training run
# and the number of timesteps in each sequence.
BATCH_SIZE = 1024*128
LENGTH = 128

In [13]:
8*60/3.


Out[13]:
160.0

In [14]:
# Fixed hyperparameter configuration for a single manual run
# (same keys as produced by random_sample()).
params = {
    'learning_rate': 0.1,  # NOTE(review): currently ignored by build_model
    'recurrent_layer': SimpleRNN,
    'activation': 'tanh',
    'every_n': 20,  # spacing of the spikes in the generated sequences
    'n_hidden': 2,
    'n_layers': 2,  # 2 means no middle layers are added
    'tdd_in': False,
    'tdd_out': False,
}

In [15]:
from IPython.display import clear_output
import json

# Hyperparameter-search driver.  As written it trains the single fixed
# `params` configuration once; restore range(1000) and random_sample()
# to run the actual search.
# for i in range(1000):
for i in [1]:
    # build model
    # params = random_sample()
    model = build_model(**params)
    
    print model

    # make training data
    X, Y = batch(BATCH_SIZE, one_every_n, LENGTH, params['every_n'])

    # train model
    history = model.fit(X, Y, nb_epoch=1, batch_size=128)

    # log to file one line per model, data represented by json
    # NOTE(review): `params` is mutated in place below — 'recurrent_layer'
    # is replaced by its class name, so a second loop iteration would crash
    # inside build_model.  Log a copy of params instead when searching.
    params['loss'] = history.history['loss'][0]
    params['recurrent_layer'] = params['recurrent_layer'].__name__
    with open('log', 'a') as f:
        # NOTE(review): records are appended with no separator, which is why
        # the parsing cell later splits on '}{'.  Appending '\n' after each
        # record (newline-delimited JSON) would be far more robust.
        f.write(json.dumps(params))
    
    clear_output()


<keras.models.Sequential object at 0x7f7448377b90>
Epoch 1/1
 28544/131072 [=====>........................] - ETA: 31s - loss: 0.2654
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-15-af02ba482065> in <module>()
     14 
     15     # train model
---> 16     history = model.fit(X, Y, nb_epoch=1, batch_size=128)
     17 
     18     # log to file one line per model, data represented by json

/usr/local/lib/python2.7/dist-packages/Keras-0.3.0-py2.7.egg/keras/models.pyc in fit(self, X, y, batch_size, nb_epoch, verbose, callbacks, validation_split, validation_data, shuffle, show_accuracy, class_weight, sample_weight)
    579                          verbose=verbose, callbacks=callbacks,
    580                          val_f=val_f, val_ins=val_ins,
--> 581                          shuffle=shuffle, metrics=metrics)
    582 
    583     def predict(self, X, batch_size=128, verbose=0):

/usr/local/lib/python2.7/dist-packages/Keras-0.3.0-py2.7.egg/keras/models.pyc in _fit(self, f, ins, out_labels, batch_size, nb_epoch, verbose, callbacks, val_f, val_ins, shuffle, metrics)
    237                 batch_logs['size'] = len(batch_ids)
    238                 callbacks.on_batch_begin(batch_index, batch_logs)
--> 239                 outs = f(ins_batch)
    240                 if type(outs) != list:
    241                     outs = [outs]

/usr/local/lib/python2.7/dist-packages/Keras-0.3.0-py2.7.egg/keras/backend/theano_backend.pyc in __call__(self, inputs)
    371 
    372     def __call__(self, inputs):
--> 373         return self.function(*inputs)
    374 
    375 

/usr/local/lib/python2.7/dist-packages/theano/compile/function_module.pyc in __call__(self, *args, **kwargs)
    857         t0_fn = time.time()
    858         try:
--> 859             outputs = self.fn()
    860         except Exception:
    861             if hasattr(self.fn, 'position_of_error'):

/usr/local/lib/python2.7/dist-packages/theano/scan_module/scan_op.pyc in rval(p, i, o, n, allow_gc)
    949         def rval(p=p, i=node_input_storage, o=node_output_storage, n=node,
    950                  allow_gc=allow_gc):
--> 951             r = p(n, [x[0] for x in i], o)
    952             for o in node.outputs:
    953                 compute_map[o][0] = True

/usr/local/lib/python2.7/dist-packages/theano/scan_module/scan_op.pyc in <lambda>(node, args, outs)
    938                         args,
    939                         outs,
--> 940                         self, node)
    941         except (ImportError, theano.gof.cmodule.MissingGXX):
    942             p = self.execute

KeyboardInterrupt: 

In [429]:
# Predict one example: generate a single fresh sequence with the same
# every-n spacing and run it through the trained model.
X, Y = batch(1, one_every_n, LENGTH, params['every_n'])
Y_out = model.predict(X)

In [430]:
# Plot the input sequence against the model's prediction.
# `plot` comes from the %pylab star-import.  NOTE(review): params['loss']
# is only set once the training loop's logging step has run.
print params['loss']
plot(X[0,:])
# show()
# plot(Y[0,:])
plot(Y_out[0,:])
# show()
# show()


0.237076640657
Out[430]:
[<matplotlib.lines.Line2D at 0x7fb503e36f50>]

In [ ]:


In [ ]:


In [354]:
# Recover individual experiment records from the log file.  Records were
# written as back-to-back JSON objects with no separator, so split on the
# '}{' boundary and strip the outer braces from the first and last pieces.
# NOTE(review): fragile — breaks on an empty log or if any logged value
# contains the substring '}{'.  Newline-delimited JSON would avoid this.
with open('log') as f:
    experiments = f.read().split('}{')
    experiments[0] = experiments[0][1:]
    experiments[-1] = experiments[-1][:-1]

In [356]:
# Re-parse each logged fragment as a JSON object by restoring the braces
# that the '}{' split removed.
decoded = []
for fragment in experiments:
    decoded.append(json.loads('{' + fragment + '}'))
experiments = decoded

In [358]:
import pandas as pd

In [412]:
# Load the decoded experiments into a DataFrame and keep only the good runs.
# NOTE(review): 0.075 is a hand-picked loss threshold — adjust to taste.
df = pd.DataFrame(experiments)
df = df[df['loss'] < 0.075]

In [413]:
# Encode the categorical columns as integers so they can be scatter-plotted;
# the mapping is printed so the numeric axis values can be interpreted.
for c in ['activation', 'recurrent_layer']:
    d = {k: v for v, k in enumerate(sorted(list(set(df[c]))))}
    print c, d
    df[c] = [d[e] for e in df[c]]


activation {u'tanh': 1, u'sigmoid': 0}
recurrent_layer {u'LSTM': 1, u'GRU': 0, u'SimpleRNN': 2}

In [414]:
# Add small random jitter (up to 0.2) to the discrete-valued columns so
# overlapping points remain distinguishable in the scatter plots below.
for c in ['n_layers', 'tdd_in', 'tdd_out', 'activation', 'recurrent_layer']:
    df[c] += np.random.random(len(df))*0.2

In [415]:
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure, gridplot, show
from bokeh.io import output_notebook

output_notebook()

# create a column data source for the plots to share, so box/lasso
# selections in one subplot highlight the same runs in all the others
source = ColumnDataSource(data=df)

TOOLS = "box_select,lasso_select,help,box_zoom"

# Lay out one scatter plot per hyperparameter (column vs. loss),
# in rows of three.
plotss = []
plots = []
for column in df.columns:
    if column == 'loss':
        continue
    
    f = figure(tools=TOOLS, width=300, height=300, title=column)
    f.circle(column, 'loss', source=source)
    plots.append(f)
    if len(plots) == 3:
        plotss.append(plots)
        plots = []

plotss.append(plots)  # flush the final, possibly partial, row
p = gridplot(plotss)

show(p)


Loading BokehJS ...
Out[415]:

<Bokeh Notebook handle for In[150]>


In [ ]: