In [1]:
import numpy as np

from msmbuilder.example_datasets import FsPeptide
fs = FsPeptide().get()

from msmbuilder.featurizer import DihedralFeaturizer
dhf = DihedralFeaturizer()
dhft = dhf.fit_transform(fs.trajectories)

from sklearn.decomposition import PCA
pca = PCA(whiten=True)
pca.fit(np.vstack(dhft))

import matplotlib.pyplot as plt
%matplotlib inline
plt.plot(100*np.cumsum(pca.explained_variance_ratio_))
plt.hlines(100.0,0,pca.n_components_,linestyles='--')
plt.ylim(0,100)
plt.xlabel('Number of components')
plt.ylabel('Cumulative explained variance (%)')

#n_comp = sum(np.cumsum(pca.explained_variance_ratio_)<0.95)

#X_ = pca.transform(np.vstack(dhft))[:,:n_comp]

X_ = np.vstack(dhft)
X_ -= X_.mean(0)
X_ /= X_.std(0)
X_train_ = X_[:200000]
X_test_ = X_[200000:]

#npr.seed(0)
#mask = npr.rand(len(X_))<0.7
#X_train_ = X_[mask]
#X_test_ = X_[-mask]


loading trajectory_1.xtc...
loading trajectory_10.xtc...
loading trajectory_11.xtc...
loading trajectory_12.xtc...
loading trajectory_13.xtc...
loading trajectory_14.xtc...
loading trajectory_15.xtc...
loading trajectory_16.xtc...
loading trajectory_17.xtc...
loading trajectory_18.xtc...
loading trajectory_19.xtc...
loading trajectory_2.xtc...
loading trajectory_20.xtc...
loading trajectory_21.xtc...
loading trajectory_22.xtc...
loading trajectory_23.xtc...
loading trajectory_24.xtc...
loading trajectory_25.xtc...
loading trajectory_26.xtc...
loading trajectory_27.xtc...
loading trajectory_28.xtc...
loading trajectory_3.xtc...
loading trajectory_4.xtc...
loading trajectory_5.xtc...
loading trajectory_6.xtc...
loading trajectory_7.xtc...
loading trajectory_8.xtc...
loading trajectory_9.xtc...
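
A note on the train/test split above: it is contiguous (the first 200,000 frames train, the remainder test), so train and test frames come from different stretches of the trajectories. The commented-out block sketches a random 70/30 split instead; a corrected version of that sketch (assuming `npr` stands for `numpy.random`, and using `~mask` in place of the deprecated `-mask`) would be:

np.random.seed(0)
mask = np.random.rand(len(X_)) < 0.7
X_train_, X_test_ = X_[mask], X_[~mask]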

In [53]:
print('The first 2 components only explain {0:.2f}% of the variance'.format(100*np.cumsum(pca.explained_variance_ratio_)[1]))


The first 2 components only explain 19.97% of the variance

In [3]:
from keras.preprocessing import sequence
from keras.optimizers import SGD, RMSprop, Adagrad
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU
from keras.datasets import imdb


/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/scipy/lib/_util.py:67: DeprecationWarning: Module scipy.linalg.blas.fblas is deprecated, use scipy.linalg.blas instead
  DeprecationWarning)
[repeated 8×]

In [12]:
from keras.layers.recurrent import LSTM

lstm = LSTM(10, output_dim=128,
            init='glorot_uniform', inner_init='orthogonal',
            activation='tanh', inner_activation='hard_sigmoid',
            weights=None, truncate_gradient=-1, return_sequences=False)

In [14]:
model = Sequential()
model.add(LSTM(256, 128)) # try using a GRU instead, for fun
model.add(Dropout(0.5))
model.add(Dense(128, 1))
model.add(Activation('sigmoid'))

In [16]:
imdb_data = imdb.load_data()

In [22]:
X_[0].shape,np.hstack(X_[:2]).shape


Out[22]:
((85,), (170,))

In [7]:
sequence.skipgrams(X_[:100],10)


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-7-e14ac1328d6c> in <module>()
----> 1 sequence.skipgrams(X_[:100],10)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/Keras-0.0.1-py2.7.egg/keras/preprocessing/sequence.pyc in skipgrams(sequence, vocabulary_size, window_size, negative_samples, shuffle, categorical, sampling_table)
     67     labels = []
     68     for i, wi in enumerate(sequence):
---> 69         if not wi:
     70             continue
     71         if sampling_table is not None:

ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
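
This failure is expected: `sequence.skipgrams` is written for text, where the input is a flat list of integer word indices and the vocabulary size drives negative sampling; it cannot handle a list of continuous feature vectors. A toy call with the intended input type (hypothetical word indices) works:

couples, labels = sequence.skipgrams([1, 2, 3, 1, 2], vocabulary_size=4)

So for continuous trajectory frames, the skip-gram couples have to be generated by hand, as in the next cell.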

In [94]:
def generate_skip_gram_couples(sequences,neighborhood_size=10,n_examples=100000,prop_positive=0.5):
    """Positive pairs: frames within `neighborhood_size` of each other in the
    same trajectory. Negative pairs: everything else. Labels are one-hot."""
    np.random.seed(0)
    couples = np.zeros((n_examples,2*sequences[0].shape[1]))
    y = np.zeros((n_examples,2))

    for i in range(n_examples):
        ind1 = np.random.randint(len(sequences))
        sequence = sequences[ind1]
        pivot = np.random.randint(len(sequence)-neighborhood_size)
        if np.random.rand() < prop_positive:
            # positive: a frame in the pivot's forward neighborhood
            label = 1
            other = np.random.randint(neighborhood_size)+pivot
            couples[i] = np.hstack((sequence[pivot],sequence[other]))
        else:
            # negative: rejection-sample until the partner frame is in a
            # different trajectory or outside the pivot's neighborhood
            label = 0
            ind2 = np.random.randint(len(sequences))
            sequence2 = sequences[ind2]
            other = np.random.randint(len(sequence2))
            while ind1 == ind2 and abs(other-pivot) < neighborhood_size:
                ind2 = np.random.randint(len(sequences))
                sequence2 = sequences[ind2]
                other = np.random.randint(len(sequence2))
            couples[i] = np.hstack((sequence[pivot],sequence2[other]))

        y[i,label] = 1
    return couples,y
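
A quick sanity check of the generator (a minimal sketch, assuming `dhft` as loaded above): the one-hot labels should be balanced at roughly `prop_positive`, and each couple is two concatenated feature vectors:

X_check, y_check = generate_skip_gram_couples(dhft, n_examples=1000)
print(y_check.mean(axis=0))  # ≈ [0.5, 0.5] for prop_positive=0.5
print(X_check.shape)         # (1000, 2 * n_features)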

In [95]:
X,y=generate_skip_gram_couples(dhft)

In [96]:
np.random.seed(0)
mask = np.random.rand(len(X))<0.7
X_train,y_train = X[mask],y[mask]
X_test,y_test = X[~mask],y[~mask]



In [97]:
X.shape,y.shape


Out[97]:
((100000, 168), (100000, 2))

In [98]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(200, 50))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(50, 2))
model.add(Activation('relu'))
model.add(Dense(2, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)

model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.1353 - acc.: 0.7947 - val. loss: 0.0215 - val. acc.: 0.9753
Epoch 1
6s - loss: 0.0331 - acc.: 0.9599 - val. loss: 0.0215 - val. acc.: 0.9741
Epoch 2
6s - loss: 0.0274 - acc.: 0.9665 - val. loss: 0.0181 - val. acc.: 0.9781
Epoch 3
6s - loss: 0.0254 - acc.: 0.9695 - val. loss: 0.0177 - val. acc.: 0.9787
Epoch 4
6s - loss: 0.0237 - acc.: 0.9717 - val. loss: 0.0172 - val. acc.: 0.9803
Epoch 5
6s - loss: 0.0221 - acc.: 0.9739 - val. loss: 0.0176 - val. acc.: 0.9794
Epoch 6
7s - loss: 0.0211 - acc.: 0.9755 - val. loss: 0.0162 - val. acc.: 0.9812
Epoch 7
7s - loss: 0.0200 - acc.: 0.9765 - val. loss: 0.0162 - val. acc.: 0.9815
Epoch 8
7s - loss: 0.0198 - acc.: 0.9766 - val. loss: 0.0163 - val. acc.: 0.9810
Epoch 9
7s - loss: 0.0197 - acc.: 0.9770 - val. loss: 0.0161 - val. acc.: 0.9815
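
One note on the head used in In [98]: it ends with a ReLU into a linear `Dense(2, 2)`, trained with squared error against one-hot labels. That clearly works (≈98% validation accuracy), but the textbook choice for a two-class one-hot target is a softmax output with categorical cross-entropy. A sketch of that variant in this Keras version's `Dense(n_in, n_out)` API (not what was run above):

model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(200, 50))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(50, 2))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer=RMSprop())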

In [99]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add(Dense(200, 50))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add(Dense(50, 2))
model.add(Activation('tanh'))
model.add(Dense(2, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)

model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.2487 - acc.: 0.5182 - val. loss: 0.2237 - val. acc.: 0.6411
Epoch 1
5s - loss: 0.1315 - acc.: 0.8085 - val. loss: 0.0386 - val. acc.: 0.9587
Epoch 2
5s - loss: 0.0681 - acc.: 0.9099 - val. loss: 0.0323 - val. acc.: 0.9583
Epoch 3
5s - loss: 0.0532 - acc.: 0.9315 - val. loss: 0.0245 - val. acc.: 0.9697
Epoch 4
5s - loss: 0.0446 - acc.: 0.9436 - val. loss: 0.0254 - val. acc.: 0.9686
Epoch 5
5s - loss: 0.0405 - acc.: 0.9489 - val. loss: 0.0276 - val. acc.: 0.9661
Epoch 6
5s - loss: 0.0377 - acc.: 0.9532 - val. loss: 0.0221 - val. acc.: 0.9736
Epoch 7
5s - loss: 0.0354 - acc.: 0.9566 - val. loss: 0.0240 - val. acc.: 0.9717
Epoch 8
5s - loss: 0.0335 - acc.: 0.9586 - val. loss: 0.0221 - val. acc.: 0.9741
Epoch 9
5s - loss: 0.0316 - acc.: 0.9612 - val. loss: 0.0238 - val. acc.: 0.9720

In [100]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(200, 50))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(50, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)

In [101]:
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.0903 - acc.: 0.9001 - val. loss: 0.0263 - val. acc.: 0.9767
Epoch 1
7s - loss: 0.0429 - acc.: 0.9654 - val. loss: 0.0203 - val. acc.: 0.9782
Epoch 2
8s - loss: 0.0388 - acc.: 0.9714 - val. loss: 0.0209 - val. acc.: 0.9786
Epoch 3
9s - loss: 0.0371 - acc.: 0.9734 - val. loss: 0.0188 - val. acc.: 0.9796
Epoch 4
10s - loss: 0.0353 - acc.: 0.9751 - val. loss: 0.0184 - val. acc.: 0.9798
Epoch 5
10s - loss: 0.0349 - acc.: 0.9768 - val. loss: 0.0198 - val. acc.: 0.9796
Epoch 6
11s - loss: 0.0349 - acc.: 0.9769 - val. loss: 0.0187 - val. acc.: 0.9804
Epoch 7
11s - loss: 0.0340 - acc.: 0.9775 - val. loss: 0.0184 - val. acc.: 0.9805
Epoch 8
11s - loss: 0.0337 - acc.: 0.9786 - val. loss: 0.0180 - val. acc.: 0.9816
Epoch 9
12s - loss: 0.0333 - acc.: 0.9790 - val. loss: 0.0183 - val. acc.: 0.9811

In [102]:
model = Sequential()
model.add(Dense(168, 2))
model.add(Activation('linear'))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)

model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
0s - loss: 0.2661 - acc.: 0.5008 - val. loss: 0.2558 - val. acc.: 0.4905
Epoch 1
0s - loss: 0.2599 - acc.: 0.5004 - val. loss: 0.2539 - val. acc.: 0.4992
Epoch 2
0s - loss: 0.2597 - acc.: 0.5023 - val. loss: 0.2570 - val. acc.: 0.5035
Epoch 3
0s - loss: 0.2599 - acc.: 0.5001 - val. loss: 0.2555 - val. acc.: 0.4793
Epoch 4
0s - loss: 0.2594 - acc.: 0.5049 - val. loss: 0.2549 - val. acc.: 0.4809
Epoch 5
0s - loss: 0.2596 - acc.: 0.5018 - val. loss: 0.2552 - val. acc.: 0.5011
Epoch 6
0s - loss: 0.2594 - acc.: 0.5021 - val. loss: 0.2564 - val. acc.: 0.4884
Epoch 7
0s - loss: 0.2595 - acc.: 0.5024 - val. loss: 0.2548 - val. acc.: 0.5143
Epoch 8
0s - loss: 0.2597 - acc.: 0.5016 - val. loss: 0.2698 - val. acc.: 0.4870
Epoch 9
0s - loss: 0.2596 - acc.: 0.4987 - val. loss: 0.3171 - val. acc.: 0.4941

In [103]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add(Dense(200, 50))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add(Dense(50, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.1662 - acc.: 0.7579 - val. loss: 0.0544 - val. acc.: 0.9499
Epoch 1
5s - loss: 0.0770 - acc.: 0.9102 - val. loss: 0.0323 - val. acc.: 0.9600
Epoch 2
5s - loss: 0.0576 - acc.: 0.9350 - val. loss: 0.0318 - val. acc.: 0.9587
Epoch 3
5s - loss: 0.0486 - acc.: 0.9460 - val. loss: 0.0239 - val. acc.: 0.9716
Epoch 4
5s - loss: 0.0441 - acc.: 0.9523 - val. loss: 0.0226 - val. acc.: 0.9756
Epoch 5
5s - loss: 0.0401 - acc.: 0.9574 - val. loss: 0.0221 - val. acc.: 0.9733
Epoch 6
5s - loss: 0.0383 - acc.: 0.9597 - val. loss: 0.0185 - val. acc.: 0.9781
Epoch 7
5s - loss: 0.0359 - acc.: 0.9623 - val. loss: 0.0194 - val. acc.: 0.9766
Epoch 8
5s - loss: 0.0343 - acc.: 0.9643 - val. loss: 0.0183 - val. acc.: 0.9779
Epoch 9
5s - loss: 0.0334 - acc.: 0.9651 - val. loss: 0.0188 - val. acc.: 0.9771

In [104]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(200, 50))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(50, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.0583 - acc.: 0.9424 - val. loss: 0.0219 - val. acc.: 0.9779
Epoch 1
5s - loss: 0.0257 - acc.: 0.9774 - val. loss: 0.0182 - val. acc.: 0.9793
Epoch 2
6s - loss: 0.0227 - acc.: 0.9802 - val. loss: 0.0178 - val. acc.: 0.9793
Epoch 3
7s - loss: 0.0210 - acc.: 0.9817 - val. loss: 0.0196 - val. acc.: 0.9796
Epoch 4
7s - loss: 0.0199 - acc.: 0.9830 - val. loss: 0.0173 - val. acc.: 0.9801
Epoch 5
8s - loss: 0.0194 - acc.: 0.9839 - val. loss: 0.0170 - val. acc.: 0.9811
Epoch 6
8s - loss: 0.0189 - acc.: 0.9842 - val. loss: 0.0169 - val. acc.: 0.9804
Epoch 7
8s - loss: 0.0185 - acc.: 0.9844 - val. loss: 0.0172 - val. acc.: 0.9812
Epoch 8
9s - loss: 0.0181 - acc.: 0.9853 - val. loss: 0.0168 - val. acc.: 0.9808
Epoch 9
9s - loss: 0.0180 - acc.: 0.9853 - val. loss: 0.0182 - val. acc.: 0.9804

In [105]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(200, 5))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(5, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
4s - loss: 0.0891 - acc.: 0.9075 - val. loss: 0.0300 - val. acc.: 0.9777
Epoch 1
5s - loss: 0.0548 - acc.: 0.9570 - val. loss: 0.0252 - val. acc.: 0.9784
Epoch 2
6s - loss: 0.0508 - acc.: 0.9604 - val. loss: 0.0253 - val. acc.: 0.9781
Epoch 3
6s - loss: 0.0492 - acc.: 0.9616 - val. loss: 0.0253 - val. acc.: 0.9818
Epoch 4
7s - loss: 0.0483 - acc.: 0.9624 - val. loss: 0.0229 - val. acc.: 0.9815
Epoch 5
7s - loss: 0.0476 - acc.: 0.9624 - val. loss: 0.0229 - val. acc.: 0.9818
Epoch 6
8s - loss: 0.0478 - acc.: 0.9626 - val. loss: 0.0226 - val. acc.: 0.9816
Epoch 7
8s - loss: 0.0465 - acc.: 0.9651 - val. loss: 0.0215 - val. acc.: 0.9827
Epoch 8
9s - loss: 0.0464 - acc.: 0.9643 - val. loss: 0.0234 - val. acc.: 0.9823
Epoch 9
9s - loss: 0.0464 - acc.: 0.9643 - val. loss: 0.0223 - val. acc.: 0.9821

In [106]:
model = Sequential()
model.add(Dense(168, 100))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(100, 2))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(2, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
3s - loss: 0.1178 - acc.: 0.8238 - val. loss: 0.0342 - val. acc.: 0.9782
Epoch 1
2s - loss: 0.0765 - acc.: 0.8785 - val. loss: 0.0355 - val. acc.: 0.9766
Epoch 2
3s - loss: 0.0730 - acc.: 0.8818 - val. loss: 0.0375 - val. acc.: 0.9808
Epoch 3
2s - loss: 0.0707 - acc.: 0.8852 - val. loss: 0.0298 - val. acc.: 0.9802
Epoch 4
3s - loss: 0.0706 - acc.: 0.8837 - val. loss: 0.0323 - val. acc.: 0.9806
Epoch 5
3s - loss: 0.0693 - acc.: 0.8866 - val. loss: 0.0303 - val. acc.: 0.9809
Epoch 6
2s - loss: 0.0696 - acc.: 0.8838 - val. loss: 0.0322 - val. acc.: 0.9819
Epoch 7
2s - loss: 0.0692 - acc.: 0.8849 - val. loss: 0.0308 - val. acc.: 0.9820
Epoch 8
3s - loss: 0.0683 - acc.: 0.8870 - val. loss: 0.0313 - val. acc.: 0.9827
Epoch 9
3s - loss: 0.0694 - acc.: 0.8845 - val. loss: 0.0299 - val. acc.: 0.9824

In [107]:
model = Sequential()
model.add(Dense(168, 2))
model.add(Activation('relu'))
model.add(Dropout(0.2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
0s - loss: 0.3168 - acc.: 0.5020 - val. loss: 0.2655 - val. acc.: 0.5186
Epoch 1
0s - loss: 0.3086 - acc.: 0.4999 - val. loss: 0.2632 - val. acc.: 0.5210
Epoch 2
0s - loss: 0.3080 - acc.: 0.5003 - val. loss: 0.2715 - val. acc.: 0.4776
Epoch 3
0s - loss: 0.3091 - acc.: 0.4999 - val. loss: 0.2824 - val. acc.: 0.4882
Epoch 4
0s - loss: 0.3088 - acc.: 0.5025 - val. loss: 0.2701 - val. acc.: 0.4901
Epoch 5
0s - loss: 0.3084 - acc.: 0.5016 - val. loss: 0.2681 - val. acc.: 0.5093
Epoch 6
0s - loss: 0.3079 - acc.: 0.5021 - val. loss: 0.2623 - val. acc.: 0.5080
Epoch 7
0s - loss: 0.3084 - acc.: 0.5007 - val. loss: 0.2697 - val. acc.: 0.4952
Epoch 8
0s - loss: 0.3076 - acc.: 0.5043 - val. loss: 0.2671 - val. acc.: 0.4947
Epoch 9
0s - loss: 0.3086 - acc.: 0.5003 - val. loss: 0.2667 - val. acc.: 0.5022

In [108]:
model = Sequential()
model.add(Dense(168, 300))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(300, 2))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(2, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
6s - loss: 0.1077 - acc.: 0.8823 - val. loss: 0.0289 - val. acc.: 0.9744
Epoch 1
9s - loss: 0.0704 - acc.: 0.9381 - val. loss: 0.0304 - val. acc.: 0.9783
Epoch 2
10s - loss: 0.0663 - acc.: 0.9496 - val. loss: 0.0296 - val. acc.: 0.9790
Epoch 3
13s - loss: 0.0651 - acc.: 0.9519 - val. loss: 0.0314 - val. acc.: 0.9783
Epoch 4
14s - loss: 0.0640 - acc.: 0.9558 - val. loss: 0.0264 - val. acc.: 0.9799
Epoch 5
15s - loss: 0.0639 - acc.: 0.9568 - val. loss: 0.0250 - val. acc.: 0.9810
Epoch 6
16s - loss: 0.0632 - acc.: 0.9583 - val. loss: 0.0237 - val. acc.: 0.9803
Epoch 7
16s - loss: 0.0617 - acc.: 0.9606 - val. loss: 0.0268 - val. acc.: 0.9802
Epoch 8
17s - loss: 0.0616 - acc.: 0.9601 - val. loss: 0.0257 - val. acc.: 0.9810
Epoch 9
17s - loss: 0.0617 - acc.: 0.9615 - val. loss: 0.0250 - val. acc.: 0.9807

In [109]:
def generate_kinetic_distance_pairs(sequences,max_kinetic_distance=1000,n_examples=100000):
    """Pairs of frames from the same trajectory, labeled by the number of
    frames separating them (a crude proxy for kinetic distance)."""
    np.random.seed(0)
    pairs = np.zeros((n_examples,2*sequences[0].shape[1]))
    y = np.zeros(n_examples)

    for i in range(n_examples):
        sequence = sequences[np.random.randint(len(sequences))]
        pivot = np.random.randint(len(sequence)-max_kinetic_distance)
        kinetic_distance = np.random.randint(1,max_kinetic_distance)
        other = pivot + kinetic_distance

        pairs[i] = np.hstack((sequence[pivot],sequence[other]))
        y[i] = kinetic_distance
    # standardize the regression target to zero mean, unit variance
    y -= y.mean()
    y /= y.std()
    return pairs,y

In [110]:
X,y = generate_kinetic_distance_pairs(dhft)
np.random.seed(0)
mask = np.random.rand(len(X))<0.7
X_train,y_train = X[mask],y[mask]
X_test,y_test = X[~mask],y[~mask]



In [111]:
model = Sequential()
model.add(Dense(168, 1))
model.add(Activation('linear'))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=20, nb_epoch=20, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
0s - loss: 1.0210 - acc.: 1.0000 - val. loss: 1.0087 - val. acc.: 1.0000
Epoch 1
0s - loss: 1.0072 - acc.: 1.0000 - val. loss: 1.0117 - val. acc.: 1.0000
Epoch 2
0s - loss: 1.0060 - acc.: 1.0000 - val. loss: 1.0095 - val. acc.: 1.0000
Epoch 3
0s - loss: 1.0057 - acc.: 1.0000 - val. loss: 1.0038 - val. acc.: 1.0000
Epoch 4
0s - loss: 1.0061 - acc.: 1.0000 - val. loss: 1.0204 - val. acc.: 1.0000
Epoch 5
0s - loss: 1.0065 - acc.: 1.0000 - val. loss: 1.0047 - val. acc.: 1.0000
Epoch 6
0s - loss: 1.0054 - acc.: 1.0000 - val. loss: 1.0054 - val. acc.: 1.0000
Epoch 7
0s - loss: 1.0060 - acc.: 1.0000 - val. loss: 1.0044 - val. acc.: 1.0000
Epoch 8
0s - loss: 1.0053 - acc.: 1.0000 - val. loss: 1.0094 - val. acc.: 1.0000
Epoch 9
0s - loss: 1.0055 - acc.: 1.0000 - val. loss: 1.0095 - val. acc.: 1.0000
Epoch 10
0s - loss: 1.0053 - acc.: 1.0000 - val. loss: 1.0109 - val. acc.: 1.0000
Epoch 11
0s - loss: 1.0053 - acc.: 1.0000 - val. loss: 1.0094 - val. acc.: 1.0000
Epoch 12
0s - loss: 1.0054 - acc.: 1.0000 - val. loss: 1.0163 - val. acc.: 1.0000
Epoch 13
0s - loss: 1.0045 - acc.: 1.0000 - val. loss: 1.0145 - val. acc.: 1.0000
Epoch 14
0s - loss: 1.0061 - acc.: 1.0000 - val. loss: 1.0112 - val. acc.: 1.0000
Epoch 15
0s - loss: 1.0059 - acc.: 1.0000 - val. loss: 1.0111 - val. acc.: 1.0000
Epoch 16
0s - loss: 1.0054 - acc.: 1.0000 - val. loss: 1.0114 - val. acc.: 1.0000
Epoch 17
0s - loss: 1.0054 - acc.: 1.0000 - val. loss: 1.0052 - val. acc.: 1.0000
Epoch 18
0s - loss: 1.0049 - acc.: 1.0000 - val. loss: 1.0059 - val. acc.: 1.0000
Epoch 19
0s - loss: 1.0056 - acc.: 1.0000 - val. loss: 1.0055 - val. acc.: 1.0000
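
The flat loss near 1.0 is the tell here: since `y` was standardized to unit variance, always predicting the mean already achieves MSE = 1.0, and the linear model never improves on that baseline. (The constant `acc.: 1.0000` column is an artifact of `show_accuracy=True` on a single-output regression target and carries no information.) A minimal check of the baseline:

print(np.mean((y_test - y_test.mean())**2))  # ≈ 1.0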

In [113]:
y_pred = model.predict(X_test)
plt.hist(y_pred,bins=50);


30021/30021 [==============================] - 0s     

In [116]:
plt.scatter(y_pred,y_test,linewidths=0,alpha=0.5,s=2)
plt.xlabel('linear prediction')
plt.ylabel('actual')
plt.title('kinetic distance')


Out[116]:
<matplotlib.text.Text at 0x11401ee10>

In [72]:
model = Sequential()
model.add(Dense(168, 300))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(300, 20))
model.add(Activation('relu'))
model.add(Dense(20, 1))
model.add(Activation('relu'))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=20, nb_epoch=20, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
10s - loss: 0.9987 - acc.: 1.0000 - val. loss: 1.0031 - val. acc.: 1.0000
Epoch 1
12s - loss: 0.9987 - acc.: 1.0000 - val. loss: 1.0031 - val. acc.: 1.0000
Epoch 2
6s - loss: 0.9986 - acc.: 1.0000 - val. loss: 1.0030 - val. acc.: 1.0000
Epoch 3
6s - loss: 0.9981 - acc.: 1.0000 - val. loss: 1.0024 - val. acc.: 1.0000
Epoch 4
6s - loss: 0.9978 - acc.: 1.0000 - val. loss: 1.0020 - val. acc.: 1.0000
Epoch 5
4s - loss: 0.9973 - acc.: 1.0000 - val. loss: 1.0013 - val. acc.: 1.0000
Epoch 6
4s - loss: 0.9951 - acc.: 1.0000 - val. loss: 0.9912 - val. acc.: 1.0000
Epoch 7
4s - loss: 0.9854 - acc.: 1.0000 - val. loss: 0.9948 - val. acc.: 1.0000
Epoch 8
4s - loss: 0.9785 - acc.: 1.0000 - val. loss: 0.9775 - val. acc.: 1.0000
Epoch 9
4s - loss: 0.9711 - acc.: 1.0000 - val. loss: 0.9735 - val. acc.: 1.0000
Epoch 10
4s - loss: 0.9657 - acc.: 1.0000 - val. loss: 0.9698 - val. acc.: 1.0000
Epoch 11
4s - loss: 0.9615 - acc.: 1.0000 - val. loss: 0.9703 - val. acc.: 1.0000
Epoch 12
4s - loss: 0.9565 - acc.: 1.0000 - val. loss: 0.9605 - val. acc.: 1.0000
Epoch 13
4s - loss: 0.9501 - acc.: 1.0000 - val. loss: 0.9548 - val. acc.: 1.0000
Epoch 14
4s - loss: 0.9435 - acc.: 1.0000 - val. loss: 0.9476 - val. acc.: 1.0000
Epoch 15
4s - loss: 0.9379 - acc.: 1.0000 - val. loss: 0.9605 - val. acc.: 1.0000
Epoch 16
4s - loss: 0.9317 - acc.: 1.0000 - val. loss: 0.9356 - val. acc.: 1.0000
Epoch 17
4s - loss: 0.9198 - acc.: 1.0000 - val. loss: 0.9188 - val. acc.: 1.0000
Epoch 18
4s - loss: 0.9060 - acc.: 1.0000 - val. loss: 0.9069 - val. acc.: 1.0000
Epoch 19
4s - loss: 0.8960 - acc.: 1.0000 - val. loss: 0.8989 - val. acc.: 1.0000

In [75]:
model = Sequential()
model.add(Dense(168, 100))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(100, 10))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(10, 1))
model.add(Activation('tanh'))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=20, nb_epoch=20, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
1s - loss: 1.0072 - acc.: 1.0000 - val. loss: 1.0052 - val. acc.: 1.0000
Epoch 1
1s - loss: 0.9923 - acc.: 1.0000 - val. loss: 0.9821 - val. acc.: 1.0000
Epoch 2
1s - loss: 0.9635 - acc.: 1.0000 - val. loss: 0.9385 - val. acc.: 1.0000
Epoch 3
1s - loss: 0.9378 - acc.: 1.0000 - val. loss: 0.9150 - val. acc.: 1.0000
Epoch 4
1s - loss: 0.9131 - acc.: 1.0000 - val. loss: 0.8782 - val. acc.: 1.0000
Epoch 5
1s - loss: 0.8918 - acc.: 1.0000 - val. loss: 0.8647 - val. acc.: 1.0000
Epoch 6
1s - loss: 0.8760 - acc.: 1.0000 - val. loss: 0.8358 - val. acc.: 1.0000
Epoch 7
1s - loss: 0.8623 - acc.: 1.0000 - val. loss: 0.8310 - val. acc.: 1.0000
Epoch 8
1s - loss: 0.8535 - acc.: 1.0000 - val. loss: 0.8250 - val. acc.: 1.0000
Epoch 9
1s - loss: 0.8409 - acc.: 1.0000 - val. loss: 0.8285 - val. acc.: 1.0000
Epoch 10
1s - loss: 0.8318 - acc.: 1.0000 - val. loss: 0.8104 - val. acc.: 1.0000
Epoch 11
1s - loss: 0.8230 - acc.: 1.0000 - val. loss: 0.7948 - val. acc.: 1.0000
Epoch 12
1s - loss: 0.8175 - acc.: 1.0000 - val. loss: 0.8044 - val. acc.: 1.0000
Epoch 13
1s - loss: 0.8105 - acc.: 1.0000 - val. loss: 0.7785 - val. acc.: 1.0000
Epoch 14
1s - loss: 0.8064 - acc.: 1.0000 - val. loss: 0.7763 - val. acc.: 1.0000
Epoch 15
1s - loss: 0.8021 - acc.: 1.0000 - val. loss: 0.7713 - val. acc.: 1.0000
Epoch 16
1s - loss: 0.7952 - acc.: 1.0000 - val. loss: 0.7671 - val. acc.: 1.0000
Epoch 17
1s - loss: 0.7866 - acc.: 1.0000 - val. loss: 0.7608 - val. acc.: 1.0000
Epoch 18
1s - loss: 0.7868 - acc.: 1.0000 - val. loss: 0.7772 - val. acc.: 1.0000
Epoch 19
1s - loss: 0.7829 - acc.: 1.0000 - val. loss: 0.7585 - val. acc.: 1.0000

In [76]:
model = Sequential()
model.add(Dense(168, 200))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(200, 20))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(20, 1))
model.add(Activation('tanh'))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=20, nb_epoch=20, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
2s - loss: 1.0047 - acc.: 1.0000 - val. loss: 0.9856 - val. acc.: 1.0000
Epoch 1
2s - loss: 0.9645 - acc.: 1.0000 - val. loss: 0.9390 - val. acc.: 1.0000
Epoch 2
3s - loss: 0.9356 - acc.: 1.0000 - val. loss: 0.9191 - val. acc.: 1.0000
Epoch 3
3s - loss: 0.9149 - acc.: 1.0000 - val. loss: 0.8959 - val. acc.: 1.0000
Epoch 4
2s - loss: 0.9004 - acc.: 1.0000 - val. loss: 0.8763 - val. acc.: 1.0000
Epoch 5
2s - loss: 0.8832 - acc.: 1.0000 - val. loss: 0.8776 - val. acc.: 1.0000
Epoch 6
3s - loss: 0.8730 - acc.: 1.0000 - val. loss: 0.8699 - val. acc.: 1.0000
Epoch 7
3s - loss: 0.8616 - acc.: 1.0000 - val. loss: 0.8459 - val. acc.: 1.0000
Epoch 8
2s - loss: 0.8500 - acc.: 1.0000 - val. loss: 0.8286 - val. acc.: 1.0000
Epoch 9
2s - loss: 0.8393 - acc.: 1.0000 - val. loss: 0.8303 - val. acc.: 1.0000
Epoch 10
2s - loss: 0.8316 - acc.: 1.0000 - val. loss: 0.8164 - val. acc.: 1.0000
Epoch 11
2s - loss: 0.8216 - acc.: 1.0000 - val. loss: 0.8049 - val. acc.: 1.0000
Epoch 12
2s - loss: 0.8131 - acc.: 1.0000 - val. loss: 0.8010 - val. acc.: 1.0000
Epoch 13
3s - loss: 0.8047 - acc.: 1.0000 - val. loss: 0.7892 - val. acc.: 1.0000
Epoch 14
3s - loss: 0.7987 - acc.: 1.0000 - val. loss: 0.7907 - val. acc.: 1.0000
Epoch 15
3s - loss: 0.7947 - acc.: 1.0000 - val. loss: 0.7710 - val. acc.: 1.0000
Epoch 16
3s - loss: 0.7881 - acc.: 1.0000 - val. loss: 0.7707 - val. acc.: 1.0000
Epoch 17
3s - loss: 0.7836 - acc.: 1.0000 - val. loss: 0.7689 - val. acc.: 1.0000
Epoch 18
3s - loss: 0.7788 - acc.: 1.0000 - val. loss: 0.7665 - val. acc.: 1.0000
Epoch 19
3s - loss: 0.7684 - acc.: 1.0000 - val. loss: 0.7614 - val. acc.: 1.0000
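
A structural problem with the last two heads: In [72] ends in a ReLU, which can only output nonnegative values, and In [75]/[76] end in a tanh, bounded to (-1, 1), while the standardized uniform target spans roughly ±√3 ≈ ±1.73 (the tanh head reappears in In [125] below). A quick check:

print(y_train.min(), y_train.max())  # ≈ -1.73, +1.73 for a uniform target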

In [125]:
model = Sequential()
model.add(Dense(168, 168))
model.add(Activation('tanh'))
model.add(Dense(168, 500))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add(Dense(500, 50))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add(Dense(50, 1))
model.add(Activation('tanh'))

rms = RMSprop()
model.compile(loss='mae', optimizer=rms)
model.fit(X_train, y_train, batch_size=50, nb_epoch=1000, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.8681 - acc.: 1.0000 - val. loss: 0.8684 - val. acc.: 1.0000
Epoch 1
5s - loss: 0.8656 - acc.: 1.0000 - val. loss: 0.8703 - val. acc.: 1.0000
Epoch 2
5s - loss: 0.8653 - acc.: 1.0000 - val. loss: 0.8694 - val. acc.: 1.0000
Epoch 3
5s - loss: 0.8640 - acc.: 1.0000 - val. loss: 0.8654 - val. acc.: 1.0000
Epoch 4
5s - loss: 0.8563 - acc.: 1.0000 - val. loss: 0.8462 - val. acc.: 1.0000
Epoch 5
5s - loss: 0.8388 - acc.: 1.0000 - val. loss: 0.8487 - val. acc.: 1.0000
Epoch 6
5s - loss: 0.8233 - acc.: 1.0000 - val. loss: 0.8104 - val. acc.: 1.0000
Epoch 7
5s - loss: 0.8103 - acc.: 1.0000 - val. loss: 0.8015 - val. acc.: 1.0000
Epoch 8
5s - loss: 0.7966 - acc.: 1.0000 - val. loss: 0.7918 - val. acc.: 1.0000
Epoch 9
5s - loss: 0.7849 - acc.: 1.0000 - val. loss: 0.7739 - val. acc.: 1.0000
Epoch 10
5s - loss: 0.7751 - acc.: 1.0000 - val. loss: 0.7758 - val. acc.: 1.0000
Epoch 11
5s - loss: 0.7670 - acc.: 1.0000 - val. loss: 0.7604 - val. acc.: 1.0000
Epoch 12
5s - loss: 0.7606 - acc.: 1.0000 - val. loss: 0.7744 - val. acc.: 1.0000
Epoch 13
5s - loss: 0.7552 - acc.: 1.0000 - val. loss: 0.7486 - val. acc.: 1.0000
Epoch 14
5s - loss: 0.7496 - acc.: 1.0000 - val. loss: 0.7726 - val. acc.: 1.0000
Epoch 15
5s - loss: 0.7471 - acc.: 1.0000 - val. loss: 0.7544 - val. acc.: 1.0000
Epoch 16
5s - loss: 0.7410 - acc.: 1.0000 - val. loss: 0.7539 - val. acc.: 1.0000
Epoch 17
5s - loss: 0.7386 - acc.: 1.0000 - val. loss: 0.7518 - val. acc.: 1.0000
Epoch 18
5s - loss: 0.7376 - acc.: 1.0000 - val. loss: 0.7417 - val. acc.: 1.0000
Epoch 19
5s - loss: 0.7318 - acc.: 1.0000 - val. loss: 0.7374 - val. acc.: 1.0000
Epoch 20
5s - loss: 0.7313 - acc.: 1.0000 - val. loss: 0.7551 - val. acc.: 1.0000
Epoch 21
5s - loss: 0.7280 - acc.: 1.0000 - val. loss: 0.7365 - val. acc.: 1.0000
Epoch 22
5s - loss: 0.7241 - acc.: 1.0000 - val. loss: 0.7352 - val. acc.: 1.0000
Epoch 23
5s - loss: 0.7220 - acc.: 1.0000 - val. loss: 0.7530 - val. acc.: 1.0000
Epoch 24
5s - loss: 0.7202 - acc.: 1.0000 - val. loss: 0.7551 - val. acc.: 1.0000
Epoch 25
5s - loss: 0.7176 - acc.: 1.0000 - val. loss: 0.7337 - val. acc.: 1.0000
Epoch 26
5s - loss: 0.7173 - acc.: 1.0000 - val. loss: 0.7715 - val. acc.: 1.0000
Epoch 27
5s - loss: 0.7144 - acc.: 1.0000 - val. loss: 0.7316 - val. acc.: 1.0000
Epoch 28
5s - loss: 0.7132 - acc.: 1.0000 - val. loss: 0.7260 - val. acc.: 1.0000
Epoch 29
5s - loss: 0.7097 - acc.: 1.0000 - val. loss: 0.7275 - val. acc.: 1.0000
Epoch 30
5s - loss: 0.7093 - acc.: 1.0000 - val. loss: 0.7281 - val. acc.: 1.0000
Epoch 31
5s - loss: 0.7069 - acc.: 1.0000 - val. loss: 0.7428 - val. acc.: 1.0000
Epoch 32
5s - loss: 0.7056 - acc.: 1.0000 - val. loss: 0.7234 - val. acc.: 1.0000
Epoch 33
5s - loss: 0.7036 - acc.: 1.0000 - val. loss: 0.7198 - val. acc.: 1.0000
Epoch 34
5s - loss: 0.7008 - acc.: 1.0000 - val. loss: 0.7185 - val. acc.: 1.0000
Epoch 35
5s - loss: 0.6994 - acc.: 1.0000 - val. loss: 0.7355 - val. acc.: 1.0000
Epoch 36
5s - loss: 0.6987 - acc.: 1.0000 - val. loss: 0.7276 - val. acc.: 1.0000
Epoch 37
5s - loss: 0.6974 - acc.: 1.0000 - val. loss: 0.7261 - val. acc.: 1.0000
Epoch 38
5s - loss: 0.6955 - acc.: 1.0000 - val. loss: 0.7175 - val. acc.: 1.0000
Epoch 39
5s - loss: 0.6931 - acc.: 1.0000 - val. loss: 0.7205 - val. acc.: 1.0000
Epoch 40
5s - loss: 0.6925 - acc.: 1.0000 - val. loss: 0.7209 - val. acc.: 1.0000
Epoch 41
5s - loss: 0.6904 - acc.: 1.0000 - val. loss: 0.7200 - val. acc.: 1.0000
Epoch 42
5s - loss: 0.6888 - acc.: 1.0000 - val. loss: 0.7152 - val. acc.: 1.0000
Epoch 43
5s - loss: 0.6866 - acc.: 1.0000 - val. loss: 0.7210 - val. acc.: 1.0000
Epoch 44
5s - loss: 0.6869 - acc.: 1.0000 - val. loss: 0.7200 - val. acc.: 1.0000
Epoch 45
5s - loss: 0.6840 - acc.: 1.0000 - val. loss: 0.7187 - val. acc.: 1.0000
Epoch 46
5s - loss: 0.6818 - acc.: 1.0000 - val. loss: 0.7378 - val. acc.: 1.0000
Epoch 47
5s - loss: 0.6808 - acc.: 1.0000 - val. loss: 0.7123 - val. acc.: 1.0000
Epoch 48
5s - loss: 0.6791 - acc.: 1.0000 - val. loss: 0.7118 - val. acc.: 1.0000
Epoch 49
5s - loss: 0.6767 - acc.: 1.0000 - val. loss: 0.7162 - val. acc.: 1.0000
Epoch 50
5s - loss: 0.6753 - acc.: 1.0000 - val. loss: 0.7142 - val. acc.: 1.0000
Epoch 51
5s - loss: 0.6745 - acc.: 1.0000 - val. loss: 0.7120 - val. acc.: 1.0000
Epoch 52
5s - loss: 0.6738 - acc.: 1.0000 - val. loss: 0.7090 - val. acc.: 1.0000
Epoch 53
5s - loss: 0.6708 - acc.: 1.0000 - val. loss: 0.7069 - val. acc.: 1.0000
Epoch 54
5s - loss: 0.6677 - acc.: 1.0000 - val. loss: 0.7230 - val. acc.: 1.0000
Epoch 55
5s - loss: 0.6676 - acc.: 1.0000 - val. loss: 0.7202 - val. acc.: 1.0000
Epoch 56
5s - loss: 0.6641 - acc.: 1.0000 - val. loss: 0.7108 - val. acc.: 1.0000
Epoch 57
5s - loss: 0.6649 - acc.: 1.0000 - val. loss: 0.7168 - val. acc.: 1.0000
Epoch 58
5s - loss: 0.6631 - acc.: 1.0000 - val. loss: 0.7152 - val. acc.: 1.0000
Epoch 59
5s - loss: 0.6605 - acc.: 1.0000 - val. loss: 0.7466 - val. acc.: 1.0000
Epoch 60
5s - loss: 0.6589 - acc.: 1.0000 - val. loss: 0.7058 - val. acc.: 1.0000
Epoch 61
5s - loss: 0.6590 - acc.: 1.0000 - val. loss: 0.7055 - val. acc.: 1.0000
Epoch 62
5s - loss: 0.6558 - acc.: 1.0000 - val. loss: 0.7172 - val. acc.: 1.0000
Epoch 63
5s - loss: 0.6555 - acc.: 1.0000 - val. loss: 0.7122 - val. acc.: 1.0000
Epoch 64
5s - loss: 0.6543 - acc.: 1.0000 - val. loss: 0.7045 - val. acc.: 1.0000
Epoch 65
5s - loss: 0.6507 - acc.: 1.0000 - val. loss: 0.7114 - val. acc.: 1.0000
Epoch 66
5s - loss: 0.6505 - acc.: 1.0000 - val. loss: 0.7033 - val. acc.: 1.0000
Epoch 67
5s - loss: 0.6492 - acc.: 1.0000 - val. loss: 0.7050 - val. acc.: 1.0000
Epoch 68
5s - loss: 0.6475 - acc.: 1.0000 - val. loss: 0.7045 - val. acc.: 1.0000
Epoch 69
5s - loss: 0.6439 - acc.: 1.0000 - val. loss: 0.7184 - val. acc.: 1.0000
Epoch 70
5s - loss: 0.6440 - acc.: 1.0000 - val. loss: 0.7025 - val. acc.: 1.0000
Epoch 71
5s - loss: 0.6425 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 72
5s - loss: 0.6415 - acc.: 1.0000 - val. loss: 0.7140 - val. acc.: 1.0000
Epoch 73
5s - loss: 0.6399 - acc.: 1.0000 - val. loss: 0.7108 - val. acc.: 1.0000
Epoch 74
5s - loss: 0.6387 - acc.: 1.0000 - val. loss: 0.7024 - val. acc.: 1.0000
Epoch 75
5s - loss: 0.6371 - acc.: 1.0000 - val. loss: 0.7039 - val. acc.: 1.0000
Epoch 76
5s - loss: 0.6351 - acc.: 1.0000 - val. loss: 0.7043 - val. acc.: 1.0000
Epoch 77
5s - loss: 0.6330 - acc.: 1.0000 - val. loss: 0.7131 - val. acc.: 1.0000
Epoch 78
5s - loss: 0.6327 - acc.: 1.0000 - val. loss: 0.7082 - val. acc.: 1.0000
Epoch 79
5s - loss: 0.6321 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 80
5s - loss: 0.6302 - acc.: 1.0000 - val. loss: 0.7059 - val. acc.: 1.0000
Epoch 81
5s - loss: 0.6294 - acc.: 1.0000 - val. loss: 0.7042 - val. acc.: 1.0000
Epoch 82
5s - loss: 0.6272 - acc.: 1.0000 - val. loss: 0.7069 - val. acc.: 1.0000
Epoch 83
5s - loss: 0.6258 - acc.: 1.0000 - val. loss: 0.7046 - val. acc.: 1.0000
Epoch 84
5s - loss: 0.6250 - acc.: 1.0000 - val. loss: 0.7060 - val. acc.: 1.0000
Epoch 85
5s - loss: 0.6245 - acc.: 1.0000 - val. loss: 0.7006 - val. acc.: 1.0000
Epoch 86
5s - loss: 0.6229 - acc.: 1.0000 - val. loss: 0.7010 - val. acc.: 1.0000
Epoch 87
5s - loss: 0.6223 - acc.: 1.0000 - val. loss: 0.7021 - val. acc.: 1.0000
Epoch 88
5s - loss: 0.6202 - acc.: 1.0000 - val. loss: 0.7023 - val. acc.: 1.0000
Epoch 89
5s - loss: 0.6176 - acc.: 1.0000 - val. loss: 0.7031 - val. acc.: 1.0000
Epoch 90
5s - loss: 0.6165 - acc.: 1.0000 - val. loss: 0.7089 - val. acc.: 1.0000
Epoch 91
5s - loss: 0.6158 - acc.: 1.0000 - val. loss: 0.7043 - val. acc.: 1.0000
Epoch 92
5s - loss: 0.6136 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 93
5s - loss: 0.6130 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 94
5s - loss: 0.6117 - acc.: 1.0000 - val. loss: 0.7090 - val. acc.: 1.0000
Epoch 95
5s - loss: 0.6101 - acc.: 1.0000 - val. loss: 0.7017 - val. acc.: 1.0000
Epoch 96
5s - loss: 0.6092 - acc.: 1.0000 - val. loss: 0.7075 - val. acc.: 1.0000
Epoch 97
5s - loss: 0.6071 - acc.: 1.0000 - val. loss: 0.7048 - val. acc.: 1.0000
Epoch 98
5s - loss: 0.6069 - acc.: 1.0000 - val. loss: 0.7097 - val. acc.: 1.0000
Epoch 99
5s - loss: 0.6057 - acc.: 1.0000 - val. loss: 0.7088 - val. acc.: 1.0000
Epoch 100
5s - loss: 0.6050 - acc.: 1.0000 - val. loss: 0.7051 - val. acc.: 1.0000
Epoch 101
5s - loss: 0.6015 - acc.: 1.0000 - val. loss: 0.7031 - val. acc.: 1.0000
Epoch 102
5s - loss: 0.6014 - acc.: 1.0000 - val. loss: 0.7129 - val. acc.: 1.0000
Epoch 103
5s - loss: 0.5987 - acc.: 1.0000 - val. loss: 0.7133 - val. acc.: 1.0000
Epoch 104
5s - loss: 0.5989 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 105
5s - loss: 0.5994 - acc.: 1.0000 - val. loss: 0.7019 - val. acc.: 1.0000
Epoch 106
5s - loss: 0.5960 - acc.: 1.0000 - val. loss: 0.7061 - val. acc.: 1.0000
Epoch 107
5s - loss: 0.5965 - acc.: 1.0000 - val. loss: 0.7046 - val. acc.: 1.0000
Epoch 108
5s - loss: 0.5966 - acc.: 1.0000 - val. loss: 0.7062 - val. acc.: 1.0000
Epoch 109
5s - loss: 0.5934 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 110
5s - loss: 0.5915 - acc.: 1.0000 - val. loss: 0.7072 - val. acc.: 1.0000
Epoch 111
5s - loss: 0.5918 - acc.: 1.0000 - val. loss: 0.7088 - val. acc.: 1.0000
Epoch 112
5s - loss: 0.5889 - acc.: 1.0000 - val. loss: 0.7061 - val. acc.: 1.0000
Epoch 113
5s - loss: 0.5894 - acc.: 1.0000 - val. loss: 0.7111 - val. acc.: 1.0000
Epoch 114
5s - loss: 0.5883 - acc.: 1.0000 - val. loss: 0.7154 - val. acc.: 1.0000
Epoch 115
5s - loss: 0.5859 - acc.: 1.0000 - val. loss: 0.7172 - val. acc.: 1.0000
Epoch 116
5s - loss: 0.5860 - acc.: 1.0000 - val. loss: 0.7303 - val. acc.: 1.0000
Epoch 117
5s - loss: 0.5841 - acc.: 1.0000 - val. loss: 0.7104 - val. acc.: 1.0000
Epoch 118
5s - loss: 0.5825 - acc.: 1.0000 - val. loss: 0.7390 - val. acc.: 1.0000
Epoch 119
5s - loss: 0.5825 - acc.: 1.0000 - val. loss: 0.7210 - val. acc.: 1.0000
Epoch 120
5s - loss: 0.5843 - acc.: 1.0000 - val. loss: 0.7092 - val. acc.: 1.0000
Epoch 121
5s - loss: 0.5806 - acc.: 1.0000 - val. loss: 0.7141 - val. acc.: 1.0000
Epoch 122
5s - loss: 0.5805 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 123
5s - loss: 0.5782 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 124
5s - loss: 0.5772 - acc.: 1.0000 - val. loss: 0.7102 - val. acc.: 1.0000
Epoch 125
5s - loss: 0.5766 - acc.: 1.0000 - val. loss: 0.7419 - val. acc.: 1.0000
Epoch 126
5s - loss: 0.5760 - acc.: 1.0000 - val. loss: 0.7095 - val. acc.: 1.0000
Epoch 127
5s - loss: 0.5721 - acc.: 1.0000 - val. loss: 0.7095 - val. acc.: 1.0000
Epoch 128
5s - loss: 0.5743 - acc.: 1.0000 - val. loss: 0.7122 - val. acc.: 1.0000
Epoch 129
5s - loss: 0.5722 - acc.: 1.0000 - val. loss: 0.7054 - val. acc.: 1.0000
Epoch 130
5s - loss: 0.5732 - acc.: 1.0000 - val. loss: 0.7226 - val. acc.: 1.0000
Epoch 131
5s - loss: 0.5728 - acc.: 1.0000 - val. loss: 0.7105 - val. acc.: 1.0000
Epoch 132
5s - loss: 0.5703 - acc.: 1.0000 - val. loss: 0.7136 - val. acc.: 1.0000
Epoch 133
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-125-789d397e06e4> in <module>()
     13 rms = RMSprop()
     14 model.compile(loss='mae', optimizer=rms)
---> 15 model.fit(X_train, y_train, batch_size=50, nb_epoch=1000, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
     16 score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/Keras-0.0.1-py2.7.egg/keras/models.pyc in fit(self, X, y, batch_size, nb_epoch, verbose, validation_split, validation_data, shuffle, show_accuracy)
    245                 ins = X_batch + [y_batch]
    246                 if show_accuracy:
--> 247                     loss, acc = self._train_with_acc(*ins)
    248                     log_values = [('loss', loss), ('acc.', acc)]
    249                     av_loss += loss * len(batch_ids)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/theano/compile/function_module.pyc in __call__(self, *args, **kwargs)
    593         t0_fn = time.time()
    594         try:
--> 595             outputs = self.fn()
    596         except Exception:
    597             if hasattr(self.fn, 'position_of_error'):

KeyboardInterrupt: 
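
For these MAE runs the corresponding baseline is the mean absolute deviation of the standardized target: with kinetic distances drawn uniformly from [1, 1000), MAD/σ = √12/4 ≈ 0.866, which is exactly where the losses start (0.8681 above). The deep tanh net grinds down to ≈0.70 validation MAE, so it beats mean-prediction only modestly. A minimal check of the baseline:

print(np.mean(np.abs(y_test - y_test.mean())))  # ≈ 0.866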

In [126]:
y_pred = model.predict(X_test)
plt.hist(y_pred,bins=50);

plt.figure()

plt.scatter(y_pred,y_test,linewidths=0,alpha=0.5,s=2)
plt.xlabel('nonlinear prediction (deep tanh net)')
plt.ylabel('actual')
plt.title('kinetic distance')


30021/30021 [==============================] - 0s     
Out[126]:
<matplotlib.text.Text at 0x114fec3d0>

In [132]:
model_relu = Sequential()
model_relu.add(Dense(168, 300))
model_relu.add(Activation('relu'))
model_relu.add(Dropout(0.5))
model_relu.add(Dense(300, 100))
model_relu.add(Activation('relu'))
model_relu.add(Dropout(0.5))
model_relu.add(Dense(100, 50))
model_relu.add(Activation('relu'))
model_relu.add(Dropout(0.5))
model_relu.add(Dense(50, 1))

rms = RMSprop()
model_relu.compile(loss='mae', optimizer=rms)
model_relu.fit(X_train, y_train, batch_size=50, nb_epoch=1000, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model_relu.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
3s - loss: 0.8661 - acc.: 1.0000 - val. loss: 0.8688 - val. acc.: 1.0000
Epoch 1
3s - loss: 0.8659 - acc.: 1.0000 - val. loss: 0.8688 - val. acc.: 1.0000
Epoch 2
3s - loss: 0.8657 - acc.: 1.0000 - val. loss: 0.8682 - val. acc.: 1.0000
Epoch 3
3s - loss: 0.8645 - acc.: 1.0000 - val. loss: 0.8647 - val. acc.: 1.0000
Epoch 4
3s - loss: 0.8553 - acc.: 1.0000 - val. loss: 0.8384 - val. acc.: 1.0000
Epoch 5
3s - loss: 0.8344 - acc.: 1.0000 - val. loss: 0.8112 - val. acc.: 1.0000
Epoch 6
3s - loss: 0.8168 - acc.: 1.0000 - val. loss: 0.7890 - val. acc.: 1.0000
Epoch 7
3s - loss: 0.8042 - acc.: 1.0000 - val. loss: 0.7849 - val. acc.: 1.0000
Epoch 8
3s - loss: 0.7938 - acc.: 1.0000 - val. loss: 0.7665 - val. acc.: 1.0000
Epoch 9
3s - loss: 0.7841 - acc.: 1.0000 - val. loss: 0.7545 - val. acc.: 1.0000
Epoch 10
3s - loss: 0.7775 - acc.: 1.0000 - val. loss: 0.7623 - val. acc.: 1.0000
Epoch 11
3s - loss: 0.7722 - acc.: 1.0000 - val. loss: 0.7557 - val. acc.: 1.0000
Epoch 12
3s - loss: 0.7658 - acc.: 1.0000 - val. loss: 0.7459 - val. acc.: 1.0000
Epoch 13
3s - loss: 0.7621 - acc.: 1.0000 - val. loss: 0.7413 - val. acc.: 1.0000
Epoch 14
3s - loss: 0.7564 - acc.: 1.0000 - val. loss: 0.7358 - val. acc.: 1.0000
Epoch 15
3s - loss: 0.7536 - acc.: 1.0000 - val. loss: 0.7459 - val. acc.: 1.0000
Epoch 16
3s - loss: 0.7495 - acc.: 1.0000 - val. loss: 0.7237 - val. acc.: 1.0000
Epoch 17
3s - loss: 0.7456 - acc.: 1.0000 - val. loss: 0.7287 - val. acc.: 1.0000
Epoch 18
3s - loss: 0.7444 - acc.: 1.0000 - val. loss: 0.7314 - val. acc.: 1.0000
Epoch 19
3s - loss: 0.7405 - acc.: 1.0000 - val. loss: 0.7222 - val. acc.: 1.0000
Epoch 20
3s - loss: 0.7377 - acc.: 1.0000 - val. loss: 0.7199 - val. acc.: 1.0000
Epoch 21
3s - loss: 0.7338 - acc.: 1.0000 - val. loss: 0.7163 - val. acc.: 1.0000
Epoch 22
3s - loss: 0.7306 - acc.: 1.0000 - val. loss: 0.7259 - val. acc.: 1.0000
Epoch 23
3s - loss: 0.7301 - acc.: 1.0000 - val. loss: 0.7123 - val. acc.: 1.0000
Epoch 24
3s - loss: 0.7270 - acc.: 1.0000 - val. loss: 0.7108 - val. acc.: 1.0000
Epoch 25
3s - loss: 0.7252 - acc.: 1.0000 - val. loss: 0.7119 - val. acc.: 1.0000
Epoch 26
3s - loss: 0.7237 - acc.: 1.0000 - val. loss: 0.7200 - val. acc.: 1.0000
Epoch 27
3s - loss: 0.7215 - acc.: 1.0000 - val. loss: 0.7120 - val. acc.: 1.0000
Epoch 28
3s - loss: 0.7201 - acc.: 1.0000 - val. loss: 0.7106 - val. acc.: 1.0000
Epoch 29
3s - loss: 0.7154 - acc.: 1.0000 - val. loss: 0.7056 - val. acc.: 1.0000
Epoch 30
3s - loss: 0.7156 - acc.: 1.0000 - val. loss: 0.7104 - val. acc.: 1.0000
Epoch 31
3s - loss: 0.7143 - acc.: 1.0000 - val. loss: 0.7089 - val. acc.: 1.0000
Epoch 32
3s - loss: 0.7112 - acc.: 1.0000 - val. loss: 0.7082 - val. acc.: 1.0000
Epoch 33
3s - loss: 0.7119 - acc.: 1.0000 - val. loss: 0.6977 - val. acc.: 1.0000
Epoch 34
3s - loss: 0.7083 - acc.: 1.0000 - val. loss: 0.6989 - val. acc.: 1.0000
Epoch 35
3s - loss: 0.7076 - acc.: 1.0000 - val. loss: 0.7043 - val. acc.: 1.0000
Epoch 36
3s - loss: 0.7060 - acc.: 1.0000 - val. loss: 0.6967 - val. acc.: 1.0000
Epoch 37
3s - loss: 0.7068 - acc.: 1.0000 - val. loss: 0.6952 - val. acc.: 1.0000
Epoch 38
3s - loss: 0.7014 - acc.: 1.0000 - val. loss: 0.7014 - val. acc.: 1.0000
Epoch 39
3s - loss: 0.7035 - acc.: 1.0000 - val. loss: 0.6927 - val. acc.: 1.0000
Epoch 40
3s - loss: 0.6988 - acc.: 1.0000 - val. loss: 0.7016 - val. acc.: 1.0000
Epoch 41
3s - loss: 0.6978 - acc.: 1.0000 - val. loss: 0.6936 - val. acc.: 1.0000
Epoch 42
3s - loss: 0.6962 - acc.: 1.0000 - val. loss: 0.6953 - val. acc.: 1.0000
Epoch 43
3s - loss: 0.6959 - acc.: 1.0000 - val. loss: 0.6894 - val. acc.: 1.0000
Epoch 44
3s - loss: 0.6943 - acc.: 1.0000 - val. loss: 0.6963 - val. acc.: 1.0000
Epoch 45
3s - loss: 0.6938 - acc.: 1.0000 - val. loss: 0.6894 - val. acc.: 1.0000
Epoch 46
3s - loss: 0.6917 - acc.: 1.0000 - val. loss: 0.6862 - val. acc.: 1.0000
Epoch 47
3s - loss: 0.6906 - acc.: 1.0000 - val. loss: 0.6870 - val. acc.: 1.0000
Epoch 48
3s - loss: 0.6902 - acc.: 1.0000 - val. loss: 0.6957 - val. acc.: 1.0000
Epoch 49
3s - loss: 0.6896 - acc.: 1.0000 - val. loss: 0.6892 - val. acc.: 1.0000
Epoch 50
3s - loss: 0.6876 - acc.: 1.0000 - val. loss: 0.6909 - val. acc.: 1.0000
Epoch 51
3s - loss: 0.6877 - acc.: 1.0000 - val. loss: 0.6860 - val. acc.: 1.0000
Epoch 52
3s - loss: 0.6874 - acc.: 1.0000 - val. loss: 0.6891 - val. acc.: 1.0000
Epoch 53
3s - loss: 0.6850 - acc.: 1.0000 - val. loss: 0.6876 - val. acc.: 1.0000
Epoch 54
3s - loss: 0.6875 - acc.: 1.0000 - val. loss: 0.6898 - val. acc.: 1.0000
Epoch 55
3s - loss: 0.6842 - acc.: 1.0000 - val. loss: 0.6868 - val. acc.: 1.0000
Epoch 56
3s - loss: 0.6811 - acc.: 1.0000 - val. loss: 0.6820 - val. acc.: 1.0000
Epoch 57
3s - loss: 0.6820 - acc.: 1.0000 - val. loss: 0.6835 - val. acc.: 1.0000
Epoch 58
3s - loss: 0.6816 - acc.: 1.0000 - val. loss: 0.6847 - val. acc.: 1.0000
Epoch 59
3s - loss: 0.6801 - acc.: 1.0000 - val. loss: 0.6812 - val. acc.: 1.0000
Epoch 60
3s - loss: 0.6813 - acc.: 1.0000 - val. loss: 0.6829 - val. acc.: 1.0000
Epoch 61
3s - loss: 0.6774 - acc.: 1.0000 - val. loss: 0.6862 - val. acc.: 1.0000
Epoch 62
3s - loss: 0.6758 - acc.: 1.0000 - val. loss: 0.6851 - val. acc.: 1.0000
Epoch 63
3s - loss: 0.6754 - acc.: 1.0000 - val. loss: 0.6813 - val. acc.: 1.0000
Epoch 64
3s - loss: 0.6741 - acc.: 1.0000 - val. loss: 0.6795 - val. acc.: 1.0000
Epoch 65
3s - loss: 0.6745 - acc.: 1.0000 - val. loss: 0.6823 - val. acc.: 1.0000
Epoch 66
3s - loss: 0.6747 - acc.: 1.0000 - val. loss: 0.6821 - val. acc.: 1.0000
Epoch 67
3s - loss: 0.6726 - acc.: 1.0000 - val. loss: 0.6824 - val. acc.: 1.0000
Epoch 68
3s - loss: 0.6726 - acc.: 1.0000 - val. loss: 0.6792 - val. acc.: 1.0000
Epoch 69
3s - loss: 0.6720 - acc.: 1.0000 - val. loss: 0.6765 - val. acc.: 1.0000
Epoch 70
3s - loss: 0.6692 - acc.: 1.0000 - val. loss: 0.6811 - val. acc.: 1.0000
Epoch 71
3s - loss: 0.6686 - acc.: 1.0000 - val. loss: 0.6842 - val. acc.: 1.0000
Epoch 72
3s - loss: 0.6672 - acc.: 1.0000 - val. loss: 0.6892 - val. acc.: 1.0000
Epoch 73
3s - loss: 0.6668 - acc.: 1.0000 - val. loss: 0.6767 - val. acc.: 1.0000
Epoch 74
3s - loss: 0.6675 - acc.: 1.0000 - val. loss: 0.6779 - val. acc.: 1.0000
Epoch 75
3s - loss: 0.6675 - acc.: 1.0000 - val. loss: 0.6785 - val. acc.: 1.0000
Epoch 76
3s - loss: 0.6640 - acc.: 1.0000 - val. loss: 0.6775 - val. acc.: 1.0000
Epoch 77
3s - loss: 0.6656 - acc.: 1.0000 - val. loss: 0.6816 - val. acc.: 1.0000
Epoch 78
3s - loss: 0.6641 - acc.: 1.0000 - val. loss: 0.6816 - val. acc.: 1.0000
Epoch 79
3s - loss: 0.6636 - acc.: 1.0000 - val. loss: 0.6839 - val. acc.: 1.0000
Epoch 80
3s - loss: 0.6635 - acc.: 1.0000 - val. loss: 0.6774 - val. acc.: 1.0000
Epoch 81
3s - loss: 0.6641 - acc.: 1.0000 - val. loss: 0.6771 - val. acc.: 1.0000
Epoch 82
3s - loss: 0.6629 - acc.: 1.0000 - val. loss: 0.6793 - val. acc.: 1.0000
Epoch 83
3s - loss: 0.6628 - acc.: 1.0000 - val. loss: 0.6777 - val. acc.: 1.0000
Epoch 84
3s - loss: 0.6620 - acc.: 1.0000 - val. loss: 0.6774 - val. acc.: 1.0000
Epoch 85
3s - loss: 0.6609 - acc.: 1.0000 - val. loss: 0.6775 - val. acc.: 1.0000
Epoch 86
3s - loss: 0.6599 - acc.: 1.0000 - val. loss: 0.6727 - val. acc.: 1.0000
Epoch 87
3s - loss: 0.6585 - acc.: 1.0000 - val. loss: 0.6762 - val. acc.: 1.0000
Epoch 88
3s - loss: 0.6578 - acc.: 1.0000 - val. loss: 0.6738 - val. acc.: 1.0000
Epoch 89
3s - loss: 0.6571 - acc.: 1.0000 - val. loss: 0.6784 - val. acc.: 1.0000
Epoch 90
3s - loss: 0.6584 - acc.: 1.0000 - val. loss: 0.6810 - val. acc.: 1.0000
Epoch 91
3s - loss: 0.6568 - acc.: 1.0000 - val. loss: 0.6742 - val. acc.: 1.0000
Epoch 92
3s - loss: 0.6566 - acc.: 1.0000 - val. loss: 0.6738 - val. acc.: 1.0000
Epoch 93
3s - loss: 0.6569 - acc.: 1.0000 - val. loss: 0.6806 - val. acc.: 1.0000
Epoch 94
3s - loss: 0.6554 - acc.: 1.0000 - val. loss: 0.6812 - val. acc.: 1.0000
Epoch 95
3s - loss: 0.6543 - acc.: 1.0000 - val. loss: 0.6819 - val. acc.: 1.0000
Epoch 96
3s - loss: 0.6554 - acc.: 1.0000 - val. loss: 0.6791 - val. acc.: 1.0000
Epoch 97
3s - loss: 0.6563 - acc.: 1.0000 - val. loss: 0.6797 - val. acc.: 1.0000
Epoch 98
3s - loss: 0.6541 - acc.: 1.0000 - val. loss: 0.6753 - val. acc.: 1.0000
Epoch 99
3s - loss: 0.6515 - acc.: 1.0000 - val. loss: 0.6755 - val. acc.: 1.0000
Epoch 100
3s - loss: 0.6524 - acc.: 1.0000 - val. loss: 0.6735 - val. acc.: 1.0000
Epoch 101
3s - loss: 0.6538 - acc.: 1.0000 - val. loss: 0.6733 - val. acc.: 1.0000
Epoch 102
3s - loss: 0.6508 - acc.: 1.0000 - val. loss: 0.6790 - val. acc.: 1.0000
Epoch 103
3s - loss: 0.6513 - acc.: 1.0000 - val. loss: 0.6717 - val. acc.: 1.0000
Epoch 104
3s - loss: 0.6506 - acc.: 1.0000 - val. loss: 0.6740 - val. acc.: 1.0000
Epoch 105
3s - loss: 0.6528 - acc.: 1.0000 - val. loss: 0.6759 - val. acc.: 1.0000
Epoch 106
3s - loss: 0.6519 - acc.: 1.0000 - val. loss: 0.6768 - val. acc.: 1.0000
Epoch 107
3s - loss: 0.6478 - acc.: 1.0000 - val. loss: 0.6803 - val. acc.: 1.0000
Epoch 108
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-132-c6a46bbba9a2> in <module>()
     13 rms = RMSprop()
     14 model_relu.compile(loss='mae', optimizer=rms)
---> 15 model_relu.fit(X_train, y_train, batch_size=50, nb_epoch=1000, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
     16 score = model_relu.evaluate(X_test, y_test, show_accuracy=False, verbose=0)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/Keras-0.0.1-py2.7.egg/keras/models.pyc in fit(self, X, y, batch_size, nb_epoch, verbose, validation_split, validation_data, shuffle, show_accuracy)
    245                 ins = X_batch + [y_batch]
    246                 if show_accuracy:
--> 247                     loss, acc = self._train_with_acc(*ins)
    248                     log_values = [('loss', loss), ('acc.', acc)]
    249                     av_loss += loss * len(batch_ids)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/theano/compile/function_module.pyc in __call__(self, *args, **kwargs)
    593         t0_fn = time.time()
    594         try:
--> 595             outputs = self.fn()
    596         except Exception:
    597             if hasattr(self.fn, 'position_of_error'):

KeyboardInterrupt: 

In [133]:
y_pred = model_relu.predict(X_test)
plt.hist(y_pred,bins=50);

plt.figure()

plt.scatter(y_pred,y_test,linewidths=0,alpha=0.5,s=2)
plt.xlabel('nonlinear prediction (deep relu net)')
plt.ylabel('actual')
plt.title('kinetic distance')


30021/30021 [==============================] - 0s     
Out[133]:
<matplotlib.text.Text at 0x114e07150>

In [134]:
model_relu = Sequential()
model_relu.add(Dense(168, 500))
model_relu.add(Activation('relu'))
model_relu.add(Dropout(0.5))
model_relu.add(Dense(500, 200))
model_relu.add(Activation('relu'))
model_relu.add(Dropout(0.5))
model_relu.add(Dense(200, 50))
model_relu.add(Activation('relu'))
model_relu.add(Dropout(0.5))
model_relu.add(Dense(50, 10))
model_relu.add(Activation('relu'))
model_relu.add(Dense(10, 1))

rms = RMSprop()
model_relu.compile(loss='mae', optimizer=rms)
model_relu.fit(X_train, y_train, batch_size=50, nb_epoch=1000, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model_relu.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
5s - loss: 0.8660 - acc.: 1.0000 - val. loss: 0.8687 - val. acc.: 1.0000
Epoch 1
5s - loss: 0.8659 - acc.: 1.0000 - val. loss: 0.8686 - val. acc.: 1.0000
Epoch 2
5s - loss: 0.8658 - acc.: 1.0000 - val. loss: 0.8687 - val. acc.: 1.0000
Epoch 3
5s - loss: 0.8653 - acc.: 1.0000 - val. loss: 0.8674 - val. acc.: 1.0000
Epoch 4
5s - loss: 0.8641 - acc.: 1.0000 - val. loss: 0.8632 - val. acc.: 1.0000
Epoch 5
5s - loss: 0.8577 - acc.: 1.0000 - val. loss: 0.8485 - val. acc.: 1.0000
[... epochs 6-743 omitted: the training MAE falls steadily from 0.86 to about 0.49, while the validation loss reaches its minimum (~0.68) near epoch 50 and then drifts back up toward 0.70; the network keeps fitting the training set long after validation performance stops improving ...]
Epoch 744
6s - loss: 0.4885 - acc.: 1.0000 - val. loss: 0.7053 - val. acc.: 1.0000
Epoch 745
6s - loss: 0.4857 - acc.: 1.0000 - val. loss: 0.7055 - val. acc.: 1.0000
Epoch 746
6s - loss: 0.4857 - acc.: 1.0000 - val. loss: 0.7083 - val. acc.: 1.0000
Epoch 747
6s - loss: 0.4838 - acc.: 1.0000 - val. loss: 0.7092 - val. acc.: 1.0000
Epoch 748
6s - loss: 0.4856 - acc.: 1.0000 - val. loss: 0.7083 - val. acc.: 1.0000
Epoch 749
6s - loss: 0.4863 - acc.: 1.0000 - val. loss: 0.7047 - val. acc.: 1.0000
Epoch 750
6s - loss: 0.4860 - acc.: 1.0000 - val. loss: 0.7019 - val. acc.: 1.0000
Epoch 751
6s - loss: 0.4844 - acc.: 1.0000 - val. loss: 0.7085 - val. acc.: 1.0000
Epoch 752
6s - loss: 0.4867 - acc.: 1.0000 - val. loss: 0.7091 - val. acc.: 1.0000
Epoch 753
6s - loss: 0.4844 - acc.: 1.0000 - val. loss: 0.7087 - val. acc.: 1.0000
Epoch 754
6s - loss: 0.4848 - acc.: 1.0000 - val. loss: 0.7045 - val. acc.: 1.0000
Epoch 755
6s - loss: 0.4859 - acc.: 1.0000 - val. loss: 0.7032 - val. acc.: 1.0000
Epoch 756
6s - loss: 0.4872 - acc.: 1.0000 - val. loss: 0.7087 - val. acc.: 1.0000
Epoch 757
6s - loss: 0.4841 - acc.: 1.0000 - val. loss: 0.7062 - val. acc.: 1.0000
Epoch 758
6s - loss: 0.4860 - acc.: 1.0000 - val. loss: 0.7059 - val. acc.: 1.0000
Epoch 759
6s - loss: 0.4862 - acc.: 1.0000 - val. loss: 0.7079 - val. acc.: 1.0000
Epoch 760
6s - loss: 0.4881 - acc.: 1.0000 - val. loss: 0.7068 - val. acc.: 1.0000
Epoch 761
6s - loss: 0.4848 - acc.: 1.0000 - val. loss: 0.7111 - val. acc.: 1.0000
Epoch 762
6s - loss: 0.4868 - acc.: 1.0000 - val. loss: 0.7113 - val. acc.: 1.0000
Epoch 763
6s - loss: 0.4839 - acc.: 1.0000 - val. loss: 0.7092 - val. acc.: 1.0000
Epoch 764
6s - loss: 0.4854 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 765
6s - loss: 0.4845 - acc.: 1.0000 - val. loss: 0.7052 - val. acc.: 1.0000
Epoch 766
6s - loss: 0.4854 - acc.: 1.0000 - val. loss: 0.7048 - val. acc.: 1.0000
Epoch 767
6s - loss: 0.4838 - acc.: 1.0000 - val. loss: 0.7013 - val. acc.: 1.0000
Epoch 768
6s - loss: 0.4858 - acc.: 1.0000 - val. loss: 0.7119 - val. acc.: 1.0000
Epoch 769
6s - loss: 0.4854 - acc.: 1.0000 - val. loss: 0.7056 - val. acc.: 1.0000
Epoch 770
6s - loss: 0.4862 - acc.: 1.0000 - val. loss: 0.7089 - val. acc.: 1.0000
Epoch 771
6s - loss: 0.4841 - acc.: 1.0000 - val. loss: 0.7066 - val. acc.: 1.0000
Epoch 772
6s - loss: 0.4853 - acc.: 1.0000 - val. loss: 0.7079 - val. acc.: 1.0000
Epoch 773
6s - loss: 0.4862 - acc.: 1.0000 - val. loss: 0.7064 - val. acc.: 1.0000
Epoch 774
6s - loss: 0.4839 - acc.: 1.0000 - val. loss: 0.7033 - val. acc.: 1.0000
Epoch 775
6s - loss: 0.4854 - acc.: 1.0000 - val. loss: 0.7054 - val. acc.: 1.0000
Epoch 776
6s - loss: 0.4815 - acc.: 1.0000 - val. loss: 0.7006 - val. acc.: 1.0000
Epoch 777
6s - loss: 0.4851 - acc.: 1.0000 - val. loss: 0.7025 - val. acc.: 1.0000
Epoch 778
6s - loss: 0.4823 - acc.: 1.0000 - val. loss: 0.7098 - val. acc.: 1.0000
Epoch 779
6s - loss: 0.4850 - acc.: 1.0000 - val. loss: 0.7122 - val. acc.: 1.0000
Epoch 780
6s - loss: 0.4841 - acc.: 1.0000 - val. loss: 0.7109 - val. acc.: 1.0000
Epoch 781
6s - loss: 0.4854 - acc.: 1.0000 - val. loss: 0.7117 - val. acc.: 1.0000
Epoch 782
6s - loss: 0.4848 - acc.: 1.0000 - val. loss: 0.7125 - val. acc.: 1.0000
Epoch 783
6s - loss: 0.4856 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 784
6s - loss: 0.4849 - acc.: 1.0000 - val. loss: 0.7092 - val. acc.: 1.0000
Epoch 785
6s - loss: 0.4859 - acc.: 1.0000 - val. loss: 0.7084 - val. acc.: 1.0000
Epoch 786
6s - loss: 0.4831 - acc.: 1.0000 - val. loss: 0.7090 - val. acc.: 1.0000
Epoch 787
6s - loss: 0.4845 - acc.: 1.0000 - val. loss: 0.7094 - val. acc.: 1.0000
Epoch 788
6s - loss: 0.4815 - acc.: 1.0000 - val. loss: 0.7019 - val. acc.: 1.0000
Epoch 789
6s - loss: 0.4826 - acc.: 1.0000 - val. loss: 0.7076 - val. acc.: 1.0000
Epoch 790
6s - loss: 0.4867 - acc.: 1.0000 - val. loss: 0.7130 - val. acc.: 1.0000
Epoch 791
6s - loss: 0.4825 - acc.: 1.0000 - val. loss: 0.7076 - val. acc.: 1.0000
Epoch 792
6s - loss: 0.4826 - acc.: 1.0000 - val. loss: 0.7143 - val. acc.: 1.0000
Epoch 793
6s - loss: 0.4833 - acc.: 1.0000 - val. loss: 0.7149 - val. acc.: 1.0000
Epoch 794
6s - loss: 0.4840 - acc.: 1.0000 - val. loss: 0.7136 - val. acc.: 1.0000
Epoch 795
6s - loss: 0.4839 - acc.: 1.0000 - val. loss: 0.7110 - val. acc.: 1.0000
Epoch 796
6s - loss: 0.4826 - acc.: 1.0000 - val. loss: 0.7193 - val. acc.: 1.0000
Epoch 797
6s - loss: 0.4841 - acc.: 1.0000 - val. loss: 0.7125 - val. acc.: 1.0000
Epoch 798
6s - loss: 0.4819 - acc.: 1.0000 - val. loss: 0.7072 - val. acc.: 1.0000
Epoch 799
6s - loss: 0.4815 - acc.: 1.0000 - val. loss: 0.7059 - val. acc.: 1.0000
Epoch 800
6s - loss: 0.4840 - acc.: 1.0000 - val. loss: 0.7116 - val. acc.: 1.0000
Epoch 801
6s - loss: 0.4827 - acc.: 1.0000 - val. loss: 0.7050 - val. acc.: 1.0000
Epoch 802
6s - loss: 0.4835 - acc.: 1.0000 - val. loss: 0.7032 - val. acc.: 1.0000
Epoch 803
6s - loss: 0.4834 - acc.: 1.0000 - val. loss: 0.7052 - val. acc.: 1.0000
Epoch 804
6s - loss: 0.4834 - acc.: 1.0000 - val. loss: 0.7051 - val. acc.: 1.0000
Epoch 805
6s - loss: 0.4824 - acc.: 1.0000 - val. loss: 0.7148 - val. acc.: 1.0000
Epoch 806
6s - loss: 0.4835 - acc.: 1.0000 - val. loss: 0.7091 - val. acc.: 1.0000
Epoch 807
6s - loss: 0.4832 - acc.: 1.0000 - val. loss: 0.7061 - val. acc.: 1.0000
Epoch 808
6s - loss: 0.4822 - acc.: 1.0000 - val. loss: 0.7044 - val. acc.: 1.0000
Epoch 809
6s - loss: 0.4819 - acc.: 1.0000 - val. loss: 0.7046 - val. acc.: 1.0000
Epoch 810
6s - loss: 0.4820 - acc.: 1.0000 - val. loss: 0.7086 - val. acc.: 1.0000
Epoch 811
6s - loss: 0.4835 - acc.: 1.0000 - val. loss: 0.6983 - val. acc.: 1.0000
Epoch 812
6s - loss: 0.4819 - acc.: 1.0000 - val. loss: 0.7112 - val. acc.: 1.0000
Epoch 813
6s - loss: 0.4827 - acc.: 1.0000 - val. loss: 0.7085 - val. acc.: 1.0000
Epoch 814
6s - loss: 0.4816 - acc.: 1.0000 - val. loss: 0.7117 - val. acc.: 1.0000
Epoch 815
6s - loss: 0.4846 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 816
6s - loss: 0.4819 - acc.: 1.0000 - val. loss: 0.7079 - val. acc.: 1.0000
Epoch 817
6s - loss: 0.4811 - acc.: 1.0000 - val. loss: 0.7100 - val. acc.: 1.0000
Epoch 818
6s - loss: 0.4811 - acc.: 1.0000 - val. loss: 0.7089 - val. acc.: 1.0000
Epoch 819
6s - loss: 0.4803 - acc.: 1.0000 - val. loss: 0.6997 - val. acc.: 1.0000
Epoch 820
6s - loss: 0.4819 - acc.: 1.0000 - val. loss: 0.7032 - val. acc.: 1.0000
Epoch 821
6s - loss: 0.4792 - acc.: 1.0000 - val. loss: 0.7001 - val. acc.: 1.0000
Epoch 822
6s - loss: 0.4813 - acc.: 1.0000 - val. loss: 0.7045 - val. acc.: 1.0000
Epoch 823
6s - loss: 0.4802 - acc.: 1.0000 - val. loss: 0.6971 - val. acc.: 1.0000
Epoch 824
6s - loss: 0.4838 - acc.: 1.0000 - val. loss: 0.7019 - val. acc.: 1.0000
Epoch 825
6s - loss: 0.4829 - acc.: 1.0000 - val. loss: 0.7049 - val. acc.: 1.0000
Epoch 826
6s - loss: 0.4812 - acc.: 1.0000 - val. loss: 0.7037 - val. acc.: 1.0000
Epoch 827
6s - loss: 0.4787 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 828
6s - loss: 0.4823 - acc.: 1.0000 - val. loss: 0.7036 - val. acc.: 1.0000
Epoch 829
6s - loss: 0.4811 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 830
6s - loss: 0.4784 - acc.: 1.0000 - val. loss: 0.7144 - val. acc.: 1.0000
Epoch 831
6s - loss: 0.4800 - acc.: 1.0000 - val. loss: 0.7049 - val. acc.: 1.0000
Epoch 832
6s - loss: 0.4810 - acc.: 1.0000 - val. loss: 0.7021 - val. acc.: 1.0000
Epoch 833
6s - loss: 0.4827 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 834
6s - loss: 0.4818 - acc.: 1.0000 - val. loss: 0.7080 - val. acc.: 1.0000
Epoch 835
6s - loss: 0.4806 - acc.: 1.0000 - val. loss: 0.7130 - val. acc.: 1.0000
Epoch 836
6s - loss: 0.4813 - acc.: 1.0000 - val. loss: 0.7074 - val. acc.: 1.0000
Epoch 837
6s - loss: 0.4796 - acc.: 1.0000 - val. loss: 0.7027 - val. acc.: 1.0000
Epoch 838
6s - loss: 0.4808 - acc.: 1.0000 - val. loss: 0.7079 - val. acc.: 1.0000
Epoch 839
6s - loss: 0.4809 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 840
6s - loss: 0.4791 - acc.: 1.0000 - val. loss: 0.7056 - val. acc.: 1.0000
Epoch 841
6s - loss: 0.4784 - acc.: 1.0000 - val. loss: 0.7003 - val. acc.: 1.0000
Epoch 842
6s - loss: 0.4804 - acc.: 1.0000 - val. loss: 0.7179 - val. acc.: 1.0000
Epoch 843
6s - loss: 0.4803 - acc.: 1.0000 - val. loss: 0.7096 - val. acc.: 1.0000
Epoch 844
6s - loss: 0.4778 - acc.: 1.0000 - val. loss: 0.7129 - val. acc.: 1.0000
Epoch 845
6s - loss: 0.4798 - acc.: 1.0000 - val. loss: 0.7045 - val. acc.: 1.0000
Epoch 846
6s - loss: 0.4796 - acc.: 1.0000 - val. loss: 0.7037 - val. acc.: 1.0000
Epoch 847
6s - loss: 0.4789 - acc.: 1.0000 - val. loss: 0.7033 - val. acc.: 1.0000
Epoch 848
6s - loss: 0.4782 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 849
6s - loss: 0.4792 - acc.: 1.0000 - val. loss: 0.7050 - val. acc.: 1.0000
Epoch 850
6s - loss: 0.4781 - acc.: 1.0000 - val. loss: 0.7083 - val. acc.: 1.0000
Epoch 851
6s - loss: 0.4788 - acc.: 1.0000 - val. loss: 0.7002 - val. acc.: 1.0000
Epoch 852
6s - loss: 0.4799 - acc.: 1.0000 - val. loss: 0.6990 - val. acc.: 1.0000
Epoch 853
6s - loss: 0.4797 - acc.: 1.0000 - val. loss: 0.7048 - val. acc.: 1.0000
Epoch 854
6s - loss: 0.4792 - acc.: 1.0000 - val. loss: 0.7048 - val. acc.: 1.0000
Epoch 855
6s - loss: 0.4804 - acc.: 1.0000 - val. loss: 0.7039 - val. acc.: 1.0000
Epoch 856
6s - loss: 0.4818 - acc.: 1.0000 - val. loss: 0.6995 - val. acc.: 1.0000
Epoch 857
6s - loss: 0.4794 - acc.: 1.0000 - val. loss: 0.7093 - val. acc.: 1.0000
Epoch 858
6s - loss: 0.4775 - acc.: 1.0000 - val. loss: 0.7045 - val. acc.: 1.0000
Epoch 859
6s - loss: 0.4798 - acc.: 1.0000 - val. loss: 0.6997 - val. acc.: 1.0000
Epoch 860
6s - loss: 0.4789 - acc.: 1.0000 - val. loss: 0.7059 - val. acc.: 1.0000
Epoch 861
6s - loss: 0.4758 - acc.: 1.0000 - val. loss: 0.7058 - val. acc.: 1.0000
Epoch 862
6s - loss: 0.4791 - acc.: 1.0000 - val. loss: 0.7038 - val. acc.: 1.0000
Epoch 863
6s - loss: 0.4775 - acc.: 1.0000 - val. loss: 0.7060 - val. acc.: 1.0000
Epoch 864
6s - loss: 0.4789 - acc.: 1.0000 - val. loss: 0.7067 - val. acc.: 1.0000
Epoch 865
6s - loss: 0.4760 - acc.: 1.0000 - val. loss: 0.7026 - val. acc.: 1.0000
Epoch 866
6s - loss: 0.4779 - acc.: 1.0000 - val. loss: 0.7025 - val. acc.: 1.0000
Epoch 867
6s - loss: 0.4800 - acc.: 1.0000 - val. loss: 0.7028 - val. acc.: 1.0000
Epoch 868
6s - loss: 0.4772 - acc.: 1.0000 - val. loss: 0.6987 - val. acc.: 1.0000
Epoch 869
6s - loss: 0.4759 - acc.: 1.0000 - val. loss: 0.7064 - val. acc.: 1.0000
Epoch 870
6s - loss: 0.4785 - acc.: 1.0000 - val. loss: 0.7031 - val. acc.: 1.0000
Epoch 871
6s - loss: 0.4780 - acc.: 1.0000 - val. loss: 0.7054 - val. acc.: 1.0000
Epoch 872
6s - loss: 0.4792 - acc.: 1.0000 - val. loss: 0.7031 - val. acc.: 1.0000
Epoch 873
6s - loss: 0.4803 - acc.: 1.0000 - val. loss: 0.7037 - val. acc.: 1.0000
Epoch 874
6s - loss: 0.4755 - acc.: 1.0000 - val. loss: 0.7074 - val. acc.: 1.0000
Epoch 875
6s - loss: 0.4777 - acc.: 1.0000 - val. loss: 0.7056 - val. acc.: 1.0000
Epoch 876
6s - loss: 0.4784 - acc.: 1.0000 - val. loss: 0.7067 - val. acc.: 1.0000
Epoch 877
6s - loss: 0.4755 - acc.: 1.0000 - val. loss: 0.7086 - val. acc.: 1.0000
Epoch 878
6s - loss: 0.4779 - acc.: 1.0000 - val. loss: 0.7060 - val. acc.: 1.0000
Epoch 879
6s - loss: 0.4793 - acc.: 1.0000 - val. loss: 0.7052 - val. acc.: 1.0000
Epoch 880
6s - loss: 0.4780 - acc.: 1.0000 - val. loss: 0.7137 - val. acc.: 1.0000
Epoch 881
6s - loss: 0.4755 - acc.: 1.0000 - val. loss: 0.7069 - val. acc.: 1.0000
Epoch 882
6s - loss: 0.4762 - acc.: 1.0000 - val. loss: 0.7026 - val. acc.: 1.0000
Epoch 883
6s - loss: 0.4747 - acc.: 1.0000 - val. loss: 0.7068 - val. acc.: 1.0000
Epoch 884
6s - loss: 0.4775 - acc.: 1.0000 - val. loss: 0.7101 - val. acc.: 1.0000
Epoch 885
6s - loss: 0.4771 - acc.: 1.0000 - val. loss: 0.7113 - val. acc.: 1.0000
Epoch 886
6s - loss: 0.4779 - acc.: 1.0000 - val. loss: 0.7111 - val. acc.: 1.0000
Epoch 887
6s - loss: 0.4773 - acc.: 1.0000 - val. loss: 0.7029 - val. acc.: 1.0000
Epoch 888
6s - loss: 0.4762 - acc.: 1.0000 - val. loss: 0.7103 - val. acc.: 1.0000
Epoch 889
6s - loss: 0.4780 - acc.: 1.0000 - val. loss: 0.7052 - val. acc.: 1.0000
Epoch 890
6s - loss: 0.4765 - acc.: 1.0000 - val. loss: 0.7074 - val. acc.: 1.0000
Epoch 891
6s - loss: 0.4769 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 892
6s - loss: 0.4772 - acc.: 1.0000 - val. loss: 0.7060 - val. acc.: 1.0000
Epoch 893
6s - loss: 0.4783 - acc.: 1.0000 - val. loss: 0.7044 - val. acc.: 1.0000
Epoch 894
6s - loss: 0.4780 - acc.: 1.0000 - val. loss: 0.7052 - val. acc.: 1.0000
Epoch 895
6s - loss: 0.4772 - acc.: 1.0000 - val. loss: 0.7066 - val. acc.: 1.0000
Epoch 896
6s - loss: 0.4767 - acc.: 1.0000 - val. loss: 0.7101 - val. acc.: 1.0000
Epoch 897
6s - loss: 0.4759 - acc.: 1.0000 - val. loss: 0.7033 - val. acc.: 1.0000
Epoch 898
6s - loss: 0.4760 - acc.: 1.0000 - val. loss: 0.7037 - val. acc.: 1.0000
Epoch 899
6s - loss: 0.4762 - acc.: 1.0000 - val. loss: 0.7020 - val. acc.: 1.0000
Epoch 900
6s - loss: 0.4758 - acc.: 1.0000 - val. loss: 0.7069 - val. acc.: 1.0000
Epoch 901
6s - loss: 0.4780 - acc.: 1.0000 - val. loss: 0.7107 - val. acc.: 1.0000
Epoch 902
6s - loss: 0.4760 - acc.: 1.0000 - val. loss: 0.7128 - val. acc.: 1.0000
Epoch 903
6s - loss: 0.4777 - acc.: 1.0000 - val. loss: 0.7000 - val. acc.: 1.0000
Epoch 904
6s - loss: 0.4771 - acc.: 1.0000 - val. loss: 0.7105 - val. acc.: 1.0000
Epoch 905
6s - loss: 0.4772 - acc.: 1.0000 - val. loss: 0.7040 - val. acc.: 1.0000
Epoch 906
6s - loss: 0.4762 - acc.: 1.0000 - val. loss: 0.6983 - val. acc.: 1.0000
Epoch 907
6s - loss: 0.4756 - acc.: 1.0000 - val. loss: 0.7019 - val. acc.: 1.0000
Epoch 908
6s - loss: 0.4752 - acc.: 1.0000 - val. loss: 0.7077 - val. acc.: 1.0000
Epoch 909
6s - loss: 0.4763 - acc.: 1.0000 - val. loss: 0.7060 - val. acc.: 1.0000
Epoch 910
6s - loss: 0.4788 - acc.: 1.0000 - val. loss: 0.7066 - val. acc.: 1.0000
Epoch 911
6s - loss: 0.4763 - acc.: 1.0000 - val. loss: 0.7097 - val. acc.: 1.0000
Epoch 912
6s - loss: 0.4758 - acc.: 1.0000 - val. loss: 0.7053 - val. acc.: 1.0000
Epoch 913
6s - loss: 0.4773 - acc.: 1.0000 - val. loss: 0.7062 - val. acc.: 1.0000
Epoch 914
6s - loss: 0.4751 - acc.: 1.0000 - val. loss: 0.7014 - val. acc.: 1.0000
Epoch 915
6s - loss: 0.4740 - acc.: 1.0000 - val. loss: 0.7057 - val. acc.: 1.0000
Epoch 916
6s - loss: 0.4766 - acc.: 1.0000 - val. loss: 0.7027 - val. acc.: 1.0000
Epoch 917
6s - loss: 0.4747 - acc.: 1.0000 - val. loss: 0.7069 - val. acc.: 1.0000
Epoch 918
6s - loss: 0.4758 - acc.: 1.0000 - val. loss: 0.7130 - val. acc.: 1.0000
Epoch 919
6s - loss: 0.4757 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 920
6s - loss: 0.4756 - acc.: 1.0000 - val. loss: 0.7018 - val. acc.: 1.0000
Epoch 921
6s - loss: 0.4733 - acc.: 1.0000 - val. loss: 0.7080 - val. acc.: 1.0000
Epoch 922
6s - loss: 0.4750 - acc.: 1.0000 - val. loss: 0.7021 - val. acc.: 1.0000
Epoch 923
6s - loss: 0.4747 - acc.: 1.0000 - val. loss: 0.7059 - val. acc.: 1.0000
Epoch 924
6s - loss: 0.4741 - acc.: 1.0000 - val. loss: 0.7056 - val. acc.: 1.0000
Epoch 925
6s - loss: 0.4731 - acc.: 1.0000 - val. loss: 0.7059 - val. acc.: 1.0000
Epoch 926
6s - loss: 0.4760 - acc.: 1.0000 - val. loss: 0.7068 - val. acc.: 1.0000
Epoch 927
6s - loss: 0.4746 - acc.: 1.0000 - val. loss: 0.7128 - val. acc.: 1.0000
Epoch 928
6s - loss: 0.4742 - acc.: 1.0000 - val. loss: 0.7100 - val. acc.: 1.0000
Epoch 929
6s - loss: 0.4763 - acc.: 1.0000 - val. loss: 0.7064 - val. acc.: 1.0000
Epoch 930
6s - loss: 0.4755 - acc.: 1.0000 - val. loss: 0.7036 - val. acc.: 1.0000
Epoch 931
6s - loss: 0.4754 - acc.: 1.0000 - val. loss: 0.7040 - val. acc.: 1.0000
Epoch 932
6s - loss: 0.4742 - acc.: 1.0000 - val. loss: 0.7026 - val. acc.: 1.0000
Epoch 933
6s - loss: 0.4773 - acc.: 1.0000 - val. loss: 0.7078 - val. acc.: 1.0000
Epoch 934
6s - loss: 0.4761 - acc.: 1.0000 - val. loss: 0.7088 - val. acc.: 1.0000
Epoch 935
6s - loss: 0.4734 - acc.: 1.0000 - val. loss: 0.7033 - val. acc.: 1.0000
Epoch 936
6s - loss: 0.4736 - acc.: 1.0000 - val. loss: 0.6999 - val. acc.: 1.0000
Epoch 937
6s - loss: 0.4746 - acc.: 1.0000 - val. loss: 0.7100 - val. acc.: 1.0000
Epoch 938
6s - loss: 0.4732 - acc.: 1.0000 - val. loss: 0.7156 - val. acc.: 1.0000
Epoch 939
6s - loss: 0.4750 - acc.: 1.0000 - val. loss: 0.7088 - val. acc.: 1.0000
Epoch 940
6s - loss: 0.4743 - acc.: 1.0000 - val. loss: 0.7083 - val. acc.: 1.0000
Epoch 941
6s - loss: 0.4742 - acc.: 1.0000 - val. loss: 0.7121 - val. acc.: 1.0000
Epoch 942
6s - loss: 0.4743 - acc.: 1.0000 - val. loss: 0.7055 - val. acc.: 1.0000
Epoch 943
6s - loss: 0.4739 - acc.: 1.0000 - val. loss: 0.7067 - val. acc.: 1.0000
Epoch 944
6s - loss: 0.4741 - acc.: 1.0000 - val. loss: 0.7067 - val. acc.: 1.0000
Epoch 945
6s - loss: 0.4733 - acc.: 1.0000 - val. loss: 0.7058 - val. acc.: 1.0000
Epoch 946
6s - loss: 0.4745 - acc.: 1.0000 - val. loss: 0.7120 - val. acc.: 1.0000
Epoch 947
6s - loss: 0.4750 - acc.: 1.0000 - val. loss: 0.7056 - val. acc.: 1.0000
Epoch 948
6s - loss: 0.4736 - acc.: 1.0000 - val. loss: 0.7151 - val. acc.: 1.0000
Epoch 949
6s - loss: 0.4749 - acc.: 1.0000 - val. loss: 0.7048 - val. acc.: 1.0000
Epoch 950
6s - loss: 0.4740 - acc.: 1.0000 - val. loss: 0.7066 - val. acc.: 1.0000
Epoch 951
6s - loss: 0.4733 - acc.: 1.0000 - val. loss: 0.7025 - val. acc.: 1.0000
Epoch 952
6s - loss: 0.4735 - acc.: 1.0000 - val. loss: 0.7078 - val. acc.: 1.0000
Epoch 953
6s - loss: 0.4743 - acc.: 1.0000 - val. loss: 0.7034 - val. acc.: 1.0000
Epoch 954
6s - loss: 0.4734 - acc.: 1.0000 - val. loss: 0.7131 - val. acc.: 1.0000
Epoch 955
6s - loss: 0.4748 - acc.: 1.0000 - val. loss: 0.7066 - val. acc.: 1.0000
Epoch 956
6s - loss: 0.4732 - acc.: 1.0000 - val. loss: 0.7066 - val. acc.: 1.0000
Epoch 957
6s - loss: 0.4723 - acc.: 1.0000 - val. loss: 0.7000 - val. acc.: 1.0000
Epoch 958
6s - loss: 0.4722 - acc.: 1.0000 - val. loss: 0.7063 - val. acc.: 1.0000
Epoch 959
6s - loss: 0.4750 - acc.: 1.0000 - val. loss: 0.7093 - val. acc.: 1.0000
Epoch 960
6s - loss: 0.4724 - acc.: 1.0000 - val. loss: 0.7050 - val. acc.: 1.0000
Epoch 961
6s - loss: 0.4726 - acc.: 1.0000 - val. loss: 0.7102 - val. acc.: 1.0000
Epoch 962
6s - loss: 0.4718 - acc.: 1.0000 - val. loss: 0.7024 - val. acc.: 1.0000
Epoch 963
6s - loss: 0.4741 - acc.: 1.0000 - val. loss: 0.7053 - val. acc.: 1.0000
Epoch 964
6s - loss: 0.4741 - acc.: 1.0000 - val. loss: 0.7083 - val. acc.: 1.0000
Epoch 965
6s - loss: 0.4713 - acc.: 1.0000 - val. loss: 0.7107 - val. acc.: 1.0000
Epoch 966
6s - loss: 0.4712 - acc.: 1.0000 - val. loss: 0.7113 - val. acc.: 1.0000
Epoch 967
6s - loss: 0.4737 - acc.: 1.0000 - val. loss: 0.7082 - val. acc.: 1.0000
Epoch 968
6s - loss: 0.4738 - acc.: 1.0000 - val. loss: 0.7131 - val. acc.: 1.0000
Epoch 969
6s - loss: 0.4722 - acc.: 1.0000 - val. loss: 0.7064 - val. acc.: 1.0000
Epoch 970
6s - loss: 0.4716 - acc.: 1.0000 - val. loss: 0.7101 - val. acc.: 1.0000
Epoch 971
6s - loss: 0.4709 - acc.: 1.0000 - val. loss: 0.7053 - val. acc.: 1.0000
Epoch 972
6s - loss: 0.4717 - acc.: 1.0000 - val. loss: 0.7069 - val. acc.: 1.0000
Epoch 973
6s - loss: 0.4710 - acc.: 1.0000 - val. loss: 0.7082 - val. acc.: 1.0000
Epoch 974
6s - loss: 0.4742 - acc.: 1.0000 - val. loss: 0.7068 - val. acc.: 1.0000
Epoch 975
6s - loss: 0.4713 - acc.: 1.0000 - val. loss: 0.7127 - val. acc.: 1.0000
Epoch 976
6s - loss: 0.4688 - acc.: 1.0000 - val. loss: 0.7104 - val. acc.: 1.0000
Epoch 977
6s - loss: 0.4735 - acc.: 1.0000 - val. loss: 0.7044 - val. acc.: 1.0000
Epoch 978
6s - loss: 0.4723 - acc.: 1.0000 - val. loss: 0.7100 - val. acc.: 1.0000
Epoch 979
6s - loss: 0.4728 - acc.: 1.0000 - val. loss: 0.7072 - val. acc.: 1.0000
Epoch 980
6s - loss: 0.4740 - acc.: 1.0000 - val. loss: 0.7050 - val. acc.: 1.0000
Epoch 981
6s - loss: 0.4725 - acc.: 1.0000 - val. loss: 0.7126 - val. acc.: 1.0000
Epoch 982
6s - loss: 0.4733 - acc.: 1.0000 - val. loss: 0.7101 - val. acc.: 1.0000
Epoch 983
6s - loss: 0.4722 - acc.: 1.0000 - val. loss: 0.7150 - val. acc.: 1.0000
Epoch 984
6s - loss: 0.4731 - acc.: 1.0000 - val. loss: 0.7125 - val. acc.: 1.0000
Epoch 985
6s - loss: 0.4719 - acc.: 1.0000 - val. loss: 0.7021 - val. acc.: 1.0000
Epoch 986
6s - loss: 0.4737 - acc.: 1.0000 - val. loss: 0.7102 - val. acc.: 1.0000
Epoch 987
6s - loss: 0.4714 - acc.: 1.0000 - val. loss: 0.7062 - val. acc.: 1.0000
Epoch 988
6s - loss: 0.4725 - acc.: 1.0000 - val. loss: 0.7137 - val. acc.: 1.0000
Epoch 989
6s - loss: 0.4700 - acc.: 1.0000 - val. loss: 0.7115 - val. acc.: 1.0000
Epoch 990
6s - loss: 0.4723 - acc.: 1.0000 - val. loss: 0.7195 - val. acc.: 1.0000
Epoch 991
6s - loss: 0.4700 - acc.: 1.0000 - val. loss: 0.7127 - val. acc.: 1.0000
Epoch 992
6s - loss: 0.4730 - acc.: 1.0000 - val. loss: 0.7073 - val. acc.: 1.0000
Epoch 993
6s - loss: 0.4732 - acc.: 1.0000 - val. loss: 0.7088 - val. acc.: 1.0000
Epoch 994
6s - loss: 0.4697 - acc.: 1.0000 - val. loss: 0.7130 - val. acc.: 1.0000
Epoch 995
6s - loss: 0.4704 - acc.: 1.0000 - val. loss: 0.7064 - val. acc.: 1.0000
Epoch 996
6s - loss: 0.4724 - acc.: 1.0000 - val. loss: 0.7119 - val. acc.: 1.0000
Epoch 997
6s - loss: 0.4716 - acc.: 1.0000 - val. loss: 0.7085 - val. acc.: 1.0000
Epoch 998
6s - loss: 0.4730 - acc.: 1.0000 - val. loss: 0.7046 - val. acc.: 1.0000
Epoch 999
6s - loss: 0.4709 - acc.: 1.0000 - val. loss: 0.7067 - val. acc.: 1.0000

In [137]:
plt.figure('distribution of distances')
plt.hist(y_test,bins=50);

y_pred = model_relu.predict(X_test)
plt.figure('distribution of predictions')
plt.hist(y_pred,bins=50);

plt.figure()
plt.scatter(y_pred,y_test,linewidths=0,alpha=0.5,s=2)
plt.xlabel('nonlinear prediction (deep relu net)')
plt.ylabel('actual')
plt.title('kinetic distance')


30021/30021 [==============================] - 0s     
Out[137]:
<matplotlib.text.Text at 0x120d66a10>
<matplotlib.figure.Figure at 0x11dee4450>

In [80]:
model = Sequential()
model.add(Dense(168, 50))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(50, 1))
model.add(Activation('relu'))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=200, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
4s - loss: 0.9988 - acc.: 1.0000 - val. loss: 1.0031 - val. acc.: 1.0000
[... epochs 1-7 omitted: loss stuck at 0.9987, val. loss at 1.0031, unchanged from epoch 0 ...]
Epoch 8
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-80-9fba4acff328> in <module>()
      8 rms = RMSprop()
      9 model.compile(loss='mse', optimizer=rms)
---> 10 model.fit(X_train, y_train, batch_size=10, nb_epoch=200, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
     11 score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/Keras-0.0.1-py2.7.egg/keras/models.pyc in fit(self, X, y, batch_size, nb_epoch, verbose, validation_split, validation_data, shuffle, show_accuracy)
    265                 # logging
    266                 if verbose:
--> 267                     progbar.update(batch_end, log_values)
    268 
    269             history['epoch'].append(epoch)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/Keras-0.0.1-py2.7.egg/keras/utils/generic_utils.pyc in update(self, current, values)
     64                 self.unique_values.append(k)
     65             else:
---> 66                 self.sum_values[k][0] += v * (current-self.seen_so_far)
     67                 self.sum_values[k][1] += (current-self.seen_so_far)
     68         self.seen_so_far = current

KeyboardInterrupt: 
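
In [ ]:
# note on the run above: the loss never moves from 0.9987, consistent with the
# final Activation('relu') (or earlier relu units) getting stuck at zero so the
# net outputs a constant -- hence the interrupt. (A reading of the log, not
# something verified here.)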

In [ ]:
# and how well can we do with just the aligned atomic distances?

In [140]:
from mdtraj.geometry import alignment

def compute_atomwise_deviation_xyz(X_xyz,Y_xyz):
    ''' given two sets of coordinates as numpy arrays,
    align them and return the vector of distances between
    corresponding pairs of atoms'''
    X_prime = alignment.transform(X_xyz, Y_xyz)
    delta = X_prime - Y_xyz
    deviation = ((delta**2).sum(1))**0.5
    return deviation

def compute_atomwise_deviation(X,Y):
    ''' given trajectory frames, compute atomwise deviations'''
    return compute_atomwise_deviation_xyz(X.xyz[0],Y.xyz[0])
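
In [ ]:
# quick sanity check (a sketch; assumes fs.trajectories is loaded as above):
# a frame's deviation from itself should be ~0, and the RMSD between two
# frames is the root-mean-square of the atomwise deviations.
frame_a, frame_b = fs.trajectories[0][0], fs.trajectories[0][100]
assert compute_atomwise_deviation(frame_a, frame_a).max() < 1e-4
dev = compute_atomwise_deviation(frame_a, frame_b)
print(dev.shape)                 # (n_atoms,)
print(np.sqrt((dev**2).mean()))  # RMSD in nm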

In [143]:
fs.trajectories[0].n_atoms


Out[143]:
264

In [150]:
def generate_skip_gram_deviations(sequences,neighborhood_size=10,n_examples=100000,prop_positive=0.5):
    '''Generate labeled pairs, skip-gram style: a "positive" pair is two frames
    within `neighborhood_size` steps of each other in the same trajectory; a
    "negative" pair comes from different trajectories, or from the same
    trajectory but outside the neighborhood. Returns (deviations, y), where
    y is one-hot with column 1 marking positives.'''
    np.random.seed(0)
    deviations = np.zeros((n_examples,sequences[0].n_atoms))
    y = np.zeros((n_examples,2))
    
    for i in range(n_examples):
        # pick a trajectory and a pivot frame within it
        ind1 = np.random.randint(len(sequences))
        sequence = sequences[ind1]
        pivot = np.random.randint(len(sequence)-neighborhood_size)
        if np.random.rand() < prop_positive:
            # positive example: a frame inside the pivot's neighborhood
            label = 1
            other = np.random.randint(neighborhood_size)+pivot
            deviations[i] = compute_atomwise_deviation(sequence[pivot],sequence[other])
        else:
            # negative example: rejection-sample until the pair is not a neighbor pair
            label = 0
            ind2 = np.random.randint(len(sequences))
            sequence2 = sequences[ind2]
            other = np.random.randint(len(sequence2))  # was len(sequence): the wrong trajectory's length
            while ind1==ind2 and abs(other-pivot) < neighborhood_size:
                ind2 = np.random.randint(len(sequences))
                sequence2 = sequences[ind2]
                other = np.random.randint(len(sequence2))
            deviations[i] = compute_atomwise_deviation(sequence[pivot],sequence2[other])
        
        if i % (n_examples / 50) == 0:
            print(i)
        
        y[i,label] = 1
    return deviations,y
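
In [ ]:
# minimal sanity check on the generator (a sketch): each row of y should be
# one-hot, and the positive fraction should be close to prop_positive.
d_small, y_small = generate_skip_gram_deviations(fs.trajectories, n_examples=1000)
assert (y_small.sum(1) == 1).all()
print(y_small[:,1].mean())   # fraction of positive (neighbor) pairs, ~0.5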

In [155]:
X_deviations,y_dev = generate_skip_gram_deviations(fs.trajectories,n_examples=100000)


0 2000 4000 ... 98000   (progress counter, printed every 2000 examples)

In [156]:
np.random.seed(0)
mask = np.random.rand(len(X_deviations))<0.7
X_train,y_train = X_deviations[mask],y_dev[mask]
X_test,y_test = X_deviations[~mask],y_dev[~mask]  # ~ (logical not), not unary minus, for boolean masks



In [157]:
n_atoms = X_train.shape[1]

In [158]:
model = Sequential()
model.add(Dense(n_atoms, 100))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(100, 2))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(2, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
4s - loss: 0.0821 - acc.: 0.8712 - val. loss: 0.0226 - val. acc.: 0.9856
Epoch 1
5s - loss: 0.0621 - acc.: 0.8854 - val. loss: 0.0203 - val. acc.: 0.9866
Epoch 2
5s - loss: 0.0605 - acc.: 0.8873 - val. loss: 0.0238 - val. acc.: 0.9823
Epoch 3
6s - loss: 0.0605 - acc.: 0.8896 - val. loss: 0.0218 - val. acc.: 0.9841
Epoch 4
6s - loss: 0.0601 - acc.: 0.8878 - val. loss: 0.0197 - val. acc.: 0.9869
Epoch 5
6s - loss: 0.0603 - acc.: 0.8872 - val. loss: 0.0219 - val. acc.: 0.9849
Epoch 6
6s - loss: 0.0604 - acc.: 0.8882 - val. loss: 0.0205 - val. acc.: 0.9866
Epoch 7
6s - loss: 0.0605 - acc.: 0.8870 - val. loss: 0.0209 - val. acc.: 0.9859
Epoch 8
6s - loss: 0.0602 - acc.: 0.8881 - val. loss: 0.0232 - val. acc.: 0.9824
Epoch 9
6s - loss: 0.0595 - acc.: 0.8890 - val. loss: 0.0233 - val. acc.: 0.9831

In [159]:
model = Sequential()
model.add(Dense(n_atoms, 100))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(100, 2))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(2, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
3s - loss: 0.0539 - acc.: 0.9524 - val. loss: 0.0199 - val. acc.: 0.9791
Epoch 1
3s - loss: 0.0402 - acc.: 0.9625 - val. loss: 0.0139 - val. acc.: 0.9864
Epoch 2
3s - loss: 0.0386 - acc.: 0.9653 - val. loss: 0.0137 - val. acc.: 0.9867
Epoch 3
3s - loss: 0.0380 - acc.: 0.9663 - val. loss: 0.0135 - val. acc.: 0.9868
Epoch 4
3s - loss: 0.0380 - acc.: 0.9660 - val. loss: 0.0133 - val. acc.: 0.9879
Epoch 5
3s - loss: 0.0378 - acc.: 0.9675 - val. loss: 0.0146 - val. acc.: 0.9858
Epoch 6
3s - loss: 0.0378 - acc.: 0.9657 - val. loss: 0.0131 - val. acc.: 0.9877
Epoch 7
3s - loss: 0.0379 - acc.: 0.9667 - val. loss: 0.0130 - val. acc.: 0.9879
Epoch 8
3s - loss: 0.0374 - acc.: 0.9673 - val. loss: 0.0239 - val. acc.: 0.9720
Epoch 9
3s - loss: 0.0369 - acc.: 0.9680 - val. loss: 0.0126 - val. acc.: 0.9887

In [215]:
model = Sequential()
model.add(Dense(n_atoms, 2))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, y_test))
score = model.evaluate(X_test, y_test, show_accuracy=False, verbose=0)


Train on 69979 samples, validate on 30021 samples
Epoch 0
0s - loss: 0.0542 - acc.: 0.9654 - val. loss: 0.0380 - val. acc.: 0.9820
Epoch 1
0s - loss: 0.0376 - acc.: 0.9782 - val. loss: 0.0321 - val. acc.: 0.9800
Epoch 2
0s - loss: 0.0364 - acc.: 0.9789 - val. loss: 0.0342 - val. acc.: 0.9832
Epoch 3
0s - loss: 0.0357 - acc.: 0.9800 - val. loss: 0.0312 - val. acc.: 0.9788
Epoch 4
0s - loss: 0.0352 - acc.: 0.9806 - val. loss: 0.0305 - val. acc.: 0.9835
Epoch 5
0s - loss: 0.0350 - acc.: 0.9814 - val. loss: 0.0454 - val. acc.: 0.9704
Epoch 6
0s - loss: 0.0347 - acc.: 0.9818 - val. loss: 0.0301 - val. acc.: 0.9824
Epoch 7
0s - loss: 0.0345 - acc.: 0.9820 - val. loss: 0.0348 - val. acc.: 0.9791
Epoch 8
0s - loss: 0.0344 - acc.: 0.9818 - val. loss: 0.0394 - val. acc.: 0.9768
Epoch 9
0s - loss: 0.0344 - acc.: 0.9819 - val. loss: 0.0483 - val. acc.: 0.9856
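
In [ ]:
# summary of the three runs above: on the atomwise-deviation features, the
# relu net, the tanh net, and even a single linear layer all separate neighbor
# pairs from non-neighbor pairs with ~97-99% validation accuracy -- suggesting
# the task is nearly linearly separable in this representation.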

In [216]:
layer = model.layers[0]
weights = layer.W.get_value()

In [218]:
labels = y_test.argmax(1)  # recover 0/1 class labels from the one-hot rows

In [225]:
X_unsup = PCA(2).fit_transform(X_test)

In [232]:
plt.scatter(X_unsup[:,0],X_unsup[:,1],s=1,alpha=0.1,linewidths=0,c=labels)


Out[232]:
<matplotlib.collections.PathCollection at 0x1144b8210>

In [235]:
from sklearn.lda import LDA
lda = LDA(2)
X_sup = lda.fit_transform(X_test,labels)
X_sup.shape


Out[235]:
(30021, 1)

In [236]:
# LDA yields a single discriminant for 2 classes; random y-jitter spreads the points out
plt.scatter(X_sup,np.random.randn(len(X_sup)),s=1,alpha=0.1,linewidths=0,c=labels)


Out[236]:
<matplotlib.collections.PathCollection at 0x11f3c04d0>

In [217]:
X_pred_lin = PCA(2).fit_transform(np.dot(X_test,weights))

In [219]:
plt.scatter(X_pred_lin[:,0],X_pred_lin[:,1],s=1,alpha=0.1,linewidths=0,c=labels)


Out[219]:
<matplotlib.collections.PathCollection at 0x1192fa310>

In [223]:
plt.plot(weights[:,0])


Out[223]:
[<matplotlib.lines.Line2D at 0x1142bec10>]

In [224]:
plt.scatter(weights[:,0],weights[:,1])


Out[224]:
<matplotlib.collections.PathCollection at 0x11d2497d0>

In [239]:
from msmbuilder.featurizer import RawPositionsFeaturizer
rpft = RawPositionsFeaturizer().fit_transform(fs.trajectories)

In [245]:
X = np.vstack(rpft)

In [247]:
X.shape


Out[247]:
(280000, 792)

In [248]:
np.random.seed(0)
mask = np.random.rand(len(X))<0.7
X_train = X[mask]
X_test = X[~mask]  # ~ (logical not) for boolean masks



In [250]:
n_coords = X.shape[1]
model = Sequential()
model.add(Dense(n_coords, 2))
model.add(Dense(2, n_coords))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, X_train, batch_size=10, nb_epoch=10, show_accuracy=True, verbose=2, validation_data=(X_test, X_test))
score = model.evaluate(X_test, X_test, show_accuracy=False, verbose=0)


Train on 195989 samples, validate on 84011 samples
Epoch 0
6s - loss: 0.2381 - acc.: 0.0253 - val. loss: 0.2377 - val. acc.: 0.0270
Epoch 1
5s - loss: 0.2372 - acc.: 0.0267 - val. loss: 0.2378 - val. acc.: 0.0219
Epoch 2
5s - loss: 0.2372 - acc.: 0.0258 - val. loss: 0.2372 - val. acc.: 0.0289
Epoch 3
5s - loss: 0.2372 - acc.: 0.0268 - val. loss: 0.2382 - val. acc.: 0.0231
Epoch 4
5s - loss: 0.2372 - acc.: 0.0261 - val. loss: 0.2390 - val. acc.: 0.0200
Epoch 5
5s - loss: 0.2372 - acc.: 0.0259 - val. loss: 0.2371 - val. acc.: 0.0243
Epoch 6
5s - loss: 0.2372 - acc.: 0.0261 - val. loss: 0.2381 - val. acc.: 0.0221
Epoch 7
5s - loss: 0.2372 - acc.: 0.0262 - val. loss: 0.2372 - val. acc.: 0.0243
Epoch 8
5s - loss: 0.2372 - acc.: 0.0261 - val. loss: 0.2375 - val. acc.: 0.0251
Epoch 9
5s - loss: 0.2372 - acc.: 0.0253 - val. loss: 0.2374 - val. acc.: 0.0231
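
In [ ]:
# note: with a 2-unit linear bottleneck and MSE loss, the best autoencoder
# spans the same subspace as 2-component PCA, so PCA's reconstruction error is
# a lower bound here. A quick comparison (sketch; X is the raw-positions
# matrix from above):
pca2 = PCA(2).fit(X)
print(np.mean((pca2.inverse_transform(pca2.transform(X)) - X)**2))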

In [254]:
layer = model.layers[0]
weights = layer.W.get_value()

In [251]:
X_pca = PCA(2).fit_transform(X)

In [252]:
plt.scatter(X_pca[:,0],X_pca[:,1],linewidths=0,alpha=0.5,s=2)


Out[252]:
<matplotlib.collections.PathCollection at 0x120f982d0>

In [255]:
X_nn_pca = np.dot(X,weights)

In [256]:
plt.scatter(X_nn_pca[:,0],X_nn_pca[:,1],linewidths=0,alpha=0.5,s=2)


Out[256]:
<matplotlib.collections.PathCollection at 0x120de9990>

In [258]:
n_coords = X.shape[1]
model = Sequential()
model.add(Dense(n_coords, 50))
model.add(Activation('tanh'))
model.add(Dense(50, 2))
model.add(Activation('tanh'))
model.add(Dense(2, 50))
model.add(Activation('tanh'))
model.add(Dense(50, n_coords))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, X_train, batch_size=10, nb_epoch=100, show_accuracy=True, verbose=2, validation_data=(X_test, X_test))
score = model.evaluate(X_test, X_test, show_accuracy=False, verbose=0)


Train on 195989 samples, validate on 84011 samples
Epoch 0
23s - loss: 0.2399 - acc.: 0.0245 - val. loss: 0.2248 - val. acc.: 0.0307
Epoch 1
24s - loss: 0.2141 - acc.: 0.0277 - val. loss: 0.2070 - val. acc.: 0.0328
Epoch 2
24s - loss: 0.1996 - acc.: 0.0303 - val. loss: 0.1948 - val. acc.: 0.0424
[... epochs 3-47 omitted: loss declines from ~0.194 to ~0.159; val. loss from ~0.194 to ~0.162 ...]
Epoch 48
24s - loss: 0.1588 - acc.: 0.0426 - val. loss: 0.1610 - val. acc.: 0.0429
Epoch 49
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-258-61c8e10598b7> in <module>()
     11 rms = RMSprop()
     12 model.compile(loss='mse', optimizer=rms)
---> 13 model.fit(X_train, X_train, batch_size=10, nb_epoch=100, show_accuracy=True, verbose=2, validation_data=(X_test, X_test))
     14 score = model.evaluate(X_test, X_test, show_accuracy=False, verbose=0)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/Keras-0.0.1-py2.7.egg/keras/models.pyc in fit(self, X, y, batch_size, nb_epoch, verbose, validation_split, validation_data, shuffle, show_accuracy)
    245                 ins = X_batch + [y_batch]
    246                 if show_accuracy:
--> 247                     loss, acc = self._train_with_acc(*ins)
    248                     log_values = [('loss', loss), ('acc.', acc)]
    249                     av_loss += loss * len(batch_ids)

/Users/joshuafass/anaconda/envs/py27/lib/python2.7/site-packages/theano/compile/function_module.pyc in __call__(self, *args, **kwargs)
    593         t0_fn = time.time()
    594         try:
--> 595             outputs = self.fn()
    596         except Exception:
    597             if hasattr(self.fn, 'position_of_error'):

KeyboardInterrupt: 
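
In [ ]:
# the fit was interrupted at epoch 49, but the weights learned so far are kept
# in `model`, so the bottleneck activations extracted below come from this
# partially trained autoencoder.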

In [277]:
layer1 = model.layers[0]
layer1_out = np.tanh(np.dot(X,layer1.W.get_value())+layer1.b.get_value())
layer2 = model.layers[2]
# the tanh for this layer lives in the separate Activation('tanh') that follows
# the Dense (layer2.activation is the Dense layer's own activation, i.e. linear)
layer2_out = np.tanh(np.dot(layer1_out,layer2.W.get_value())+layer2.b.get_value())
layer1_out.shape,layer2_out.shape


Out[277]:
((280000, 50), (280000, 2))

In [278]:
X_nn = layer2_out

In [280]:
plt.scatter(X_nn[:,0],X_nn[:,1],linewidths=0,alpha=0.5,s=2,c=np.arange(len(X_nn)))


Out[280]:
<matplotlib.collections.PathCollection at 0x10a53ee50>

In [4]:
# now with dihedral angles instead

X = np.vstack(dhft)
np.random.seed(0)
mask = np.random.rand(len(X))<0.7
X_train = X[mask]
X_test = X[~mask]  # ~ (logical not) for boolean masks

n_coords = X.shape[1]
model = Sequential()
model.add(Dense(n_coords, 50))
model.add(Activation('tanh'))
model.add(Dense(50, 2))
model.add(Activation('tanh'))
model.add(Dense(2, 50))
model.add(Activation('tanh'))
model.add(Dense(50, n_coords))

rms = RMSprop()
model.compile(loss='mse', optimizer=rms)
model.fit(X_train, X_train, batch_size=10, nb_epoch=100, show_accuracy=True, verbose=2, validation_data=(X_test, X_test))
score = model.evaluate(X_test, X_test, show_accuracy=False, verbose=0)


Train on 195989 samples, validate on 84011 samples
Epoch 0
5s - loss: 0.1736 - acc.: 0.0593 - val. loss: 0.1675 - val. acc.: 0.0604
Epoch 1
5s - loss: 0.1608 - acc.: 0.0666 - val. loss: 0.1535 - val. acc.: 0.0675
Epoch 2
5s - loss: 0.1481 - acc.: 0.0537 - val. loss: 0.1432 - val. acc.: 0.0609
[... epochs 3-97 omitted: loss declines steadily from ~0.141 to ~0.101; val. loss from ~0.139 to ~0.100 ...]
Epoch 98
5s - loss: 0.1006 - acc.: 0.1087 - val. loss: 0.0999 - val. acc.: 0.0903
Epoch 99
5s - loss: 0.1006 - acc.: 0.1077 - val. loss: 0.1010 - val. acc.: 0.1078

In [289]:
def partial_apply(model,X):
    '''Run X through the first two Dense layers (and their tanh activations)
    to recover the 2-D bottleneck representation.'''
    layer1 = model.layers[0]
    layer1_out = np.tanh(np.dot(X,layer1.W.get_value())+layer1.b.get_value())
    layer2 = model.layers[2]
    # the tanh lives in the separate Activation layer that follows this Dense
    layer2_out = np.tanh(np.dot(layer1_out,layer2.W.get_value())+layer2.b.get_value())
    return layer2_out

In [291]:
X_nn = partial_apply(model,X)
plt.scatter(X_nn[:,0],X_nn[:,1],linewidths=0,alpha=0.5,s=2,c=np.arange(len(X_nn)))


Out[291]:
<matplotlib.collections.PathCollection at 0x12b5a0910>

In [2]:
X_pca = PCA(2).fit_transform(np.vstack(dhft))
plt.scatter(X_pca[:,0],X_pca[:,1],linewidths=0,alpha=0.5,s=2,c=np.arange(len(X_pca)))


Out[2]:
<matplotlib.collections.PathCollection at 0x10cd8a9d0>

In [287]:
pca = PCA(2)
pca.fit(X)
np.sum((pca.inverse_transform(pca.transform(X))-X)**2)/(len(X)*X.shape[1])


Out[287]:
0.17575058460884355
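
In [ ]:
# for comparison: the tanh autoencoder above reached a validation MSE of ~0.10
# on the same dihedral features, vs ~0.176 for 2-component PCA -- the nonlinear
# bottleneck cuts the reconstruction error by roughly 40%.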

In [286]:
X.shape


Out[286]:
(280000, 84)

In [282]:
dhft[0].shape


Out[282]:
(10000, 84)

In [81]:
from msmbuilder.example_datasets import MetEnkephalin

In [82]:
met = MetEnkephalin().get()

In [84]:
print(met.DESCR)


The dataset consists of ten ~50 ns molecular dynamics (MD) simulation
trajectories of the 5 residue Met-enkephalin peptide. The aggregate
sampling is 499.58 ns. Simulations were performed starting from the 1st
model in the 1PLX PDB file, solvated with 832 TIP3P water molecules using
OpenMM 6.0. The coordinates (protein only -- the water was stripped)
are saved every 5 picoseconds. Each of the ten trajectories is roughly
50 ns long and contains about 10,000 snapshots.

Forcefield: amber99sb-ildn; water: tip3p; nonbonded method: PME; cutoffs:
1nm; bonds to hydrogen were constrained; integrator: langevin dynamics;
temperature: 300K; friction coefficient: 1.0/ps; pressure control: Monte
Carlo barostat (interval of 25 steps); timestep 2 fs.

The dataset is available on figshare at

http://dx.doi.org/10.6084/m9.figshare.1026324


In [ ]: