In [4]:
%pylab inline
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import glob
pylab.rcParams['figure.figsize'] = (16, 12)
Populating the interactive namespace from numpy and matplotlib
In [2]:
data_dir = 'data/beatles/chordlab/The_Beatles/'
In [5]:
chord_files = glob.glob(data_dir + '*/*.lab.pcs.tsv')
In [9]:
print('total number of songs', len(chord_files))
chord_files[:5]
total number of songs 180
Out[9]:
['data/beatles/chordlab/The_Beatles/01_-_Please_Please_Me/01_-_I_Saw_Her_Standing_There.lab.pcs.tsv',
'data/beatles/chordlab/The_Beatles/01_-_Please_Please_Me/02_-_Misery.lab.pcs.tsv',
'data/beatles/chordlab/The_Beatles/01_-_Please_Please_Me/03_-_Anna_(Go_To_Him).lab.pcs.tsv',
'data/beatles/chordlab/The_Beatles/01_-_Please_Please_Me/04_-_Chains.lab.pcs.tsv',
'data/beatles/chordlab/The_Beatles/01_-_Please_Please_Me/05_-_Boys.lab.pcs.tsv']
In [373]:
def read_chord_file(path):
return pd.read_csv(path, sep='\t')
def add_track_id(df, track_id):
df['track_id'] = track_id
return df
def track_title(path):
return '/'.join(path.split('/')[-2:]).replace('.lab.pcs.tsv', '')
def read_key_file(path):
return pd.read_csv(path, sep='\t', header=None, names=['start', 'end', 'silence', 'key_label'])
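A quick peek at one parsed chord file (each row is one chord segment):
In [ ]:
# Each row of a .lab.pcs.tsv file is a chord segment: start/end time, chord
# label, root, bass and 12 binary pitch-class columns.
read_chord_file(chord_files[0]).head()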
In [294]:
selected_files = chord_files
all_chords = pd.concat(add_track_id(read_chord_file(file), track_id) for (track_id, file) in enumerate(selected_files))
all_chords['duration'] = all_chords['end'] - all_chords['start']
nonsilent_chords = all_chords[all_chords['label'] != 'N']
print('total number of chord segments', len(all_chords))
total number of chord segments 14621
In [357]:
key_files = glob.glob('data/beatles/keylab/The_Beatles/*/*.lab')
len(key_files)
Out[357]:
180
In [374]:
all_keys = pd.concat(add_track_id(read_key_file(file), track_id) for (track_id, file) in enumerate(key_files))
In [380]:
print('all key segments:', len(all_keys))
print('non-silence key segments:', len(all_keys['key_label'].dropna()))
all key segments: 408
non-silence key segments: 284
In [381]:
all_keys['key_label'].value_counts()
Out[381]:
A 51
G 40
E 35
C 28
A:minor 25
D 25
F 13
E:minor 11
D:minor 9
C:minor 8
Eb 6
B 6
F:minor 5
Bb 5
C#:minor 4
F#:minor 2
G:mixolydian 2
D:mixolydian 1
Db 1
D:aeolian 1
Ab 1
F:dorian 1
A:mixolydian 1
C#:modal 1
F# 1
Eb:minor 1
dtype: int64
In [382]:
# Strip the mode suffix (e.g. ':minor', ':mixolydian') from the key labels.
# The .str accessor handles the NaN entries (silence segments), unlike a
# map(lambda ...) over raw values, which fails on floats.
all_keys['key_label'].str.split(':').str[0]
In [123]:
pcs_columns = ['C', 'Db', 'D', 'Eb', 'E', 'F', 'Gb', 'G', 'Ab', 'A', 'Bb', 'B']
In [124]:
def find_track(name):
return [i for (i, path) in enumerate(chord_files) if name in path]
In [125]:
def draw_track(track_id):
print(track_title(chord_files[track_id]))
track = all_chords[all_chords['track_id'] == track_id]
matshow(track[pcs_columns].T)
grid(False)
gca().set_yticks(np.arange(12))
gca().set_yticklabels(pcs_columns)
Example timeline of chords represented as binary pitch-class sets in a single song:
In [79]:
draw_track(find_track('Yesterday')[0])
05_-_Help!/13_-_Yesterday
Distribution of pitch classes across all songs:
In [126]:
pc_histogram = pd.DataFrame({'pitch_class': pcs_columns, 'relative_count': nonsilent_chords[pcs_columns].mean()})
stem(pc_histogram['relative_count'])
gca().set_xticks(np.arange(12))
gca().set_xticklabels(pcs_columns);
In [127]:
pc_histogram.sort('relative_count', ascending=False, inplace=True)
In [128]:
plot(pc_histogram['relative_count'],'o:')
gca().set_xticks(np.arange(12))
gca().set_xticklabels(pc_histogram['pitch_class']);
ylim(0, 1);
xlim(-.1, 11.1);
Observation: the five most-used pitch classes in Beatles songs are A, E, D, B and G.
In [130]:
chord_histogram = all_chords['label'].value_counts()
In [131]:
chord_histogram
Out[131]:
A 1568
G 1385
D 1353
E 1039
C 966
B 503
F 489
N 427
A:min 365
E:min 336
Bb 318
B:min 293
F#:min 291
D:min 197
F# 189
...
E:min(2) 1
G:sus4/5 1
E:min(*3)/5 1
E:min(*b3)/5 1
G/#4 1
C#:(1,b3)/b3 1
G:dim/b3 1
A:7(*5,13) 1
C:maj6/5 1
A/7 1
F#:dim/b7 1
Bb:maj 1
G:maj6(9) 1
G:sus4(b7) 1
D:maj(*3) 1
Length: 407, dtype: int64
In [132]:
print('number of unique chords (including silence):', len(chord_histogram))
number of unique chords (including silence): 407
Distribution of chord usage:
In [133]:
plot(chord_histogram);
Distribution of chord root tones (i.e. chords stripped of their quality), excluding silence:
In [134]:
chord_root_histogram = nonsilent_chords['root'].value_counts()
# convert index from integers to symbolic names
chord_root_histogram.index = pd.Series(pcs_columns)[chord_root_histogram.index].values
chord_root_histogram
Out[134]:
A 2494
D 2258
G 2035
E 1816
C 1391
B 1091
F 866
Gb 707
Bb 469
Db 413
Ab 362
Eb 292
dtype: int64
The set of most frequent root pitch classes is still [A, D, G, E, C].
In [139]:
all_chords[pcs_columns + ['track_id']]
Out[139]:
[binary pitch-class matrix: one row per chord segment with columns C, Db, D, Eb, E, F, Gb, G, Ab, A, Bb, B and track_id; 14621 rows × 13 columns]
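Averaging these binary columns per track gives a rough 12-dimensional pitch-class profile of each song; a minimal sketch (the track_profiles name is only illustrative):
In [ ]:
# Average the binary pitch-class indicators per track: values close to 1 mean
# that pitch class sounds in almost every chord segment of the song.
track_profiles = all_chords.groupby('track_id')[pcs_columns].mean()
track_profiles.head()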
In [152]:
all_chords
Out[152]:
[chord segment table: columns start, end, label, root, bass, the 12 binary pitch-class columns (C through B), track_id and duration; 14621 rows × 19 columns]
Distribution of chord segment duration:
In [147]:
duration = all_chords['duration']
duration.hist(bins=100);
In [151]:
sns.distplot(duration[duration < 10], bins=100)
xlabel('duration (sec)');
First, split the data into training, validation and test sets.
In [258]:
X = all_chords[['duration'] + pcs_columns].astype(np.float32)
y = all_chords['track_id'].astype(np.int32)
In [254]:
from sklearn.cross_validation import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size=0.25, random_state=42)
len(X_train), len(X_valid), len(X_test)
Out[254]:
(8772, 2924, 2925)
In [242]:
from sklearn.linear_model import LogisticRegression
In [243]:
lr_model = LogisticRegression()
lr_model.fit(X_train, y_train)
Out[243]:
LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,
intercept_scaling=1, penalty='l2', random_state=None, tol=0.0001)
In [275]:
from sklearn.metrics import classification_report, confusion_matrix
y_pred_lr = lr_model.predict(X_valid)
lr_model.score(X_valid, y_valid)
Out[275]:
0.11183310533515732
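About 11 % validation accuracy over 180 classes should be read against a trivial baseline (random guessing would give roughly 1/180 ≈ 0.6 %); a minimal sketch using scikit-learn's DummyClassifier:
In [ ]:
# Baseline: always predict the most frequent track (try strategy='stratified'
# for frequency-weighted guessing) to put the logistic regression score into
# perspective.
from sklearn.dummy import DummyClassifier
dummy_model = DummyClassifier(strategy='most_frequent')
dummy_model.fit(X_train, y_train)
dummy_model.score(X_valid, y_valid)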
In [276]:
print(classification_report(y_valid, y_pred_lr))
precision recall f1-score support
0 0.00 0.00 0.00 11
1 0.00 0.00 0.00 6
2 0.02 0.22 0.04 9
3 0.00 0.00 0.00 5
4 0.00 0.00 0.00 14
5 0.23 0.13 0.17 23
6 0.00 0.00 0.00 17
7 0.00 0.00 0.00 16
8 0.00 0.00 0.00 16
9 0.00 0.00 0.00 8
10 0.33 0.15 0.21 27
11 0.12 0.18 0.15 17
12 0.00 0.00 0.00 11
13 0.00 0.00 0.00 16
14 0.00 0.00 0.00 14
15 0.00 0.00 0.00 6
16 0.00 0.00 0.00 12
17 0.07 0.21 0.11 19
18 0.00 0.00 0.00 8
19 0.17 0.07 0.10 15
20 0.00 0.00 0.00 9
21 0.00 0.00 0.00 19
22 0.00 0.00 0.00 16
23 0.00 0.00 0.00 11
24 0.00 0.00 0.00 6
25 0.00 0.00 0.00 18
26 0.00 0.00 0.00 20
27 0.00 0.00 0.00 17
28 0.00 0.00 0.00 16
29 0.05 0.32 0.09 28
30 0.00 0.00 0.00 12
31 0.33 0.06 0.10 18
32 0.00 0.00 0.00 15
33 0.00 0.00 0.00 19
34 0.00 0.00 0.00 11
35 0.00 0.00 0.00 14
36 0.00 0.00 0.00 11
37 0.00 0.00 0.00 17
38 0.00 0.00 0.00 14
39 0.00 0.00 0.00 13
40 0.00 0.00 0.00 14
41 0.00 0.00 0.00 12
42 0.00 0.00 0.00 22
43 0.00 0.00 0.00 17
44 0.00 0.00 0.00 6
45 0.00 0.00 0.00 13
46 0.19 0.73 0.31 15
47 0.00 0.00 0.00 14
48 0.00 0.00 0.00 23
49 0.00 0.00 0.00 21
50 0.06 0.31 0.10 13
51 0.00 0.00 0.00 18
52 0.00 0.00 0.00 15
53 0.00 0.00 0.00 10
54 0.00 0.00 0.00 9
55 0.00 0.00 0.00 10
56 0.00 0.00 0.00 22
57 0.00 0.00 0.00 22
58 0.33 0.08 0.12 26
59 0.00 0.00 0.00 16
60 0.00 0.00 0.00 15
61 0.00 0.00 0.00 6
62 0.00 0.00 0.00 13
63 0.00 0.00 0.00 16
64 0.00 0.00 0.00 23
65 0.00 0.00 0.00 26
66 0.00 0.00 0.00 16
67 0.30 0.18 0.22 17
68 0.00 0.00 0.00 13
69 0.00 0.00 0.00 11
70 0.00 0.00 0.00 6
71 0.04 0.14 0.07 22
72 0.00 0.00 0.00 11
73 0.00 0.00 0.00 14
74 0.00 0.00 0.00 16
75 0.06 0.07 0.06 15
76 0.00 0.00 0.00 14
77 0.12 0.18 0.14 11
78 0.37 0.66 0.47 29
79 0.00 0.00 0.00 21
80 0.20 0.52 0.29 21
81 0.00 0.00 0.00 4
82 0.00 0.00 0.00 11
83 0.12 0.20 0.15 5
84 0.43 0.30 0.35 10
85 0.19 0.12 0.15 25
86 1.00 0.29 0.44 7
87 0.00 0.00 0.00 33
88 0.15 0.11 0.13 27
89 0.05 0.06 0.06 17
90 0.00 0.00 0.00 14
91 0.00 0.00 0.00 11
92 0.00 0.00 0.00 9
93 0.00 0.00 0.00 7
94 0.00 0.00 0.00 15
95 0.00 0.00 0.00 18
96 0.50 0.33 0.40 9
97 0.00 0.00 0.00 9
98 0.00 0.00 0.00 25
99 0.00 0.00 0.00 24
100 0.13 0.24 0.17 17
101 0.78 0.41 0.54 17
102 0.00 0.00 0.00 15
103 0.08 0.04 0.05 25
104 0.50 0.50 0.50 2
105 0.67 0.12 0.20 17
106 0.00 0.00 0.00 15
107 0.00 0.00 0.00 31
108 0.00 0.00 0.00 15
109 0.29 0.10 0.15 20
110 0.00 0.00 0.00 8
111 0.08 0.11 0.10 18
112 0.00 0.00 0.00 7
113 0.40 0.50 0.44 4
114 0.00 0.00 0.00 10
115 0.00 0.00 0.00 25
116 0.10 0.04 0.06 24
117 0.00 0.00 0.00 23
118 0.10 0.24 0.15 37
119 0.05 0.26 0.09 19
120 0.00 0.00 0.00 29
121 0.00 0.00 0.00 20
122 0.28 0.50 0.36 30
123 0.71 0.33 0.45 15
124 0.15 0.45 0.23 20
125 0.00 0.00 0.00 4
126 0.00 0.00 0.00 28
127 0.23 0.25 0.24 28
128 0.00 0.00 0.00 20
129 0.09 0.24 0.13 17
130 0.00 0.00 0.00 11
131 0.11 0.13 0.12 31
132 0.17 0.21 0.19 14
133 0.25 0.22 0.23 23
134 0.00 0.00 0.00 11
135 0.00 0.00 0.00 4
136 0.11 0.10 0.11 10
137 0.00 0.00 0.00 26
138 0.00 0.00 0.00 9
139 0.12 0.25 0.16 12
140 0.04 0.50 0.08 22
141 0.09 0.34 0.14 29
142 0.00 0.00 0.00 25
143 0.33 0.14 0.20 7
144 0.12 0.06 0.08 17
145 0.12 0.17 0.14 12
146 0.00 0.00 0.00 19
147 0.00 0.00 0.00 16
148 0.21 0.17 0.19 18
149 0.15 0.44 0.22 16
150 0.00 0.00 0.00 23
151 0.20 0.10 0.13 10
152 0.00 0.00 0.00 17
153 0.12 0.52 0.20 44
154 0.00 0.00 0.00 16
155 0.13 0.33 0.19 21
156 0.24 0.42 0.31 65
157 0.00 0.00 0.00 22
158 1.00 0.17 0.29 12
159 0.00 0.00 0.00 30
160 0.00 0.00 0.00 5
161 0.00 0.00 0.00 7
162 0.00 0.00 0.00 22
163 0.00 0.00 0.00 12
164 0.00 0.00 0.00 5
165 0.00 0.00 0.00 8
166 0.00 0.00 0.00 15
167 0.00 0.00 0.00 6
168 0.10 0.12 0.11 16
169 0.00 0.00 0.00 19
170 0.00 0.00 0.00 12
171 0.05 0.21 0.08 14
172 0.00 0.00 0.00 8
173 0.07 0.64 0.12 28
174 0.00 0.00 0.00 3
175 0.06 0.25 0.10 32
176 0.13 0.44 0.21 9
177 0.24 0.26 0.25 23
178 0.00 0.00 0.00 5
179 0.00 0.00 0.00 15
avg / total 0.08 0.11 0.08 2924
/Users/bzamecnik/Documents/dev/pyvenv/py3.4/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
In [277]:
matshow(confusion_matrix(y_valid, y_pred_lr), cmap=cm.Spectral_r)
colorbar();
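Since the classes differ in support, a row-normalized confusion matrix (each row divided by the number of true segments of that track) is easier to read than raw counts; a minimal sketch:
In [ ]:
# Normalize each row by its class support so the diagonal shows per-class recall.
conf = confusion_matrix(y_valid, y_pred_lr).astype(np.float64)
row_sums = np.maximum(conf.sum(axis=1, keepdims=True), 1)  # guard against empty rows
matshow(conf / row_sums, cmap=cm.Spectral_r)
colorbar();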
In [285]:
import theanets
import climate  # some utilities for command-line interfaces and logging
# Note: with the notebook's default logging this makes every message in the
# training log below appear twice (a plain INFO: line plus a timestamped one).
climate.enable_default_logging()
# Feed-forward classifier: 13 inputs (duration + 12 pitch classes),
# 50 hidden units with an L1 sparsity penalty, 180 outputs (one per track).
exp = theanets.Experiment(
    theanets.Classifier,
    layers=(13, 50, 180),
    hidden_l1=0.1)
exp.train(
    (X_train, y_train),
    (X_valid, y_valid),
    optimize='rmsprop',
    learning_rate=0.01,
    momentum=0.5)
INFO:theanets.layers:layer hid1: 13 -> 50, logistic, 700 parameters
I 2015-09-16 01:27:40 theanets.layers:370 layer hid1: 13 -> 50, logistic, 700 parameters
INFO:theanets.layers:layer out: 50 -> 180, softmax, 9180 parameters
I 2015-09-16 01:27:40 theanets.layers:370 layer out: 50 -> 180, softmax, 9180 parameters
INFO:theanets.dataset:valid: 46 of 46 mini-batches of (64, 13) -> (64,)
I 2015-09-16 01:27:40 theanets.dataset:158 valid: 46 of 46 mini-batches of (64, 13) -> (64,)
INFO:theanets.dataset:train: 138 of 138 mini-batches of (64, 13) -> (64,)
I 2015-09-16 01:27:40 theanets.dataset:158 train: 138 of 138 mini-batches of (64, 13) -> (64,)
INFO:theanets.main:creating trainer <class 'theanets.trainer.RmsProp'>
I 2015-09-16 01:27:40 theanets.main:195 creating trainer <class 'theanets.trainer.RmsProp'>
INFO:theanets.main:--batch_size = 64
I 2015-09-16 01:27:40 theanets.main:197 --batch_size = 64
INFO:theanets.main:--cg_batches = None
I 2015-09-16 01:27:40 theanets.main:197 --cg_batches = None
INFO:theanets.main:--contractive = 0
I 2015-09-16 01:27:40 theanets.main:197 --contractive = 0
INFO:theanets.main:--decode_from = 1
I 2015-09-16 01:27:40 theanets.main:197 --decode_from = 1
INFO:theanets.main:--global_backtracking = False
I 2015-09-16 01:27:40 theanets.main:197 --global_backtracking = False
INFO:theanets.main:--gradient_clip = 1000000.0
I 2015-09-16 01:27:40 theanets.main:197 --gradient_clip = 1000000.0
INFO:theanets.main:--help_activation = False
I 2015-09-16 01:27:40 theanets.main:197 --help_activation = False
INFO:theanets.main:--help_optimize = False
I 2015-09-16 01:27:40 theanets.main:197 --help_optimize = False
INFO:theanets.main:--hidden_activation = logistic
I 2015-09-16 01:27:40 theanets.main:197 --hidden_activation = logistic
INFO:theanets.main:--hidden_dropouts = 0
I 2015-09-16 01:27:40 theanets.main:197 --hidden_dropouts = 0
INFO:theanets.main:--hidden_l1 = 0.1
I 2015-09-16 01:27:40 theanets.main:197 --hidden_l1 = 0.1
INFO:theanets.main:--hidden_l2 = 0
I 2015-09-16 01:27:40 theanets.main:197 --hidden_l2 = 0
INFO:theanets.main:--hidden_noise = 0
I 2015-09-16 01:27:40 theanets.main:197 --hidden_noise = 0
INFO:theanets.main:--initial_lambda = 1.0
I 2015-09-16 01:27:40 theanets.main:197 --initial_lambda = 1.0
INFO:theanets.main:--input_dropouts = 0
I 2015-09-16 01:27:40 theanets.main:197 --input_dropouts = 0
INFO:theanets.main:--input_noise = 0
I 2015-09-16 01:27:40 theanets.main:197 --input_noise = 0
INFO:theanets.main:--layers = (13, 50, 180)
I 2015-09-16 01:27:40 theanets.main:197 --layers = (13, 50, 180)
INFO:theanets.main:--learning_rate = 0.01
I 2015-09-16 01:27:40 theanets.main:197 --learning_rate = 0.01
INFO:theanets.main:--max_gradient_norm = 1000000.0
I 2015-09-16 01:27:40 theanets.main:197 --max_gradient_norm = 1000000.0
INFO:theanets.main:--min_improvement = 0.01
I 2015-09-16 01:27:40 theanets.main:197 --min_improvement = 0.01
INFO:theanets.main:--momentum = 0.5
I 2015-09-16 01:27:40 theanets.main:197 --momentum = 0.5
INFO:theanets.main:--optimize = ()
I 2015-09-16 01:27:40 theanets.main:197 --optimize = ()
INFO:theanets.main:--output_activation = linear
I 2015-09-16 01:27:40 theanets.main:197 --output_activation = linear
INFO:theanets.main:--patience = 4
I 2015-09-16 01:27:40 theanets.main:197 --patience = 4
INFO:theanets.main:--preconditioner = False
I 2015-09-16 01:27:40 theanets.main:197 --preconditioner = False
INFO:theanets.main:--recurrent_error_start = 3
I 2015-09-16 01:27:40 theanets.main:197 --recurrent_error_start = 3
INFO:theanets.main:--rms_halflife = 7
I 2015-09-16 01:27:40 theanets.main:197 --rms_halflife = 7
INFO:theanets.main:--rprop_decrease = 0.99
I 2015-09-16 01:27:40 theanets.main:197 --rprop_decrease = 0.99
INFO:theanets.main:--rprop_increase = 1.01
I 2015-09-16 01:27:40 theanets.main:197 --rprop_increase = 1.01
INFO:theanets.main:--rprop_max_step = 1.0
I 2015-09-16 01:27:40 theanets.main:197 --rprop_max_step = 1.0
INFO:theanets.main:--rprop_min_step = 0.0
I 2015-09-16 01:27:40 theanets.main:197 --rprop_min_step = 0.0
INFO:theanets.main:--save_every = 0
I 2015-09-16 01:27:40 theanets.main:197 --save_every = 0
INFO:theanets.main:--save_progress = None
I 2015-09-16 01:27:40 theanets.main:197 --save_progress = None
INFO:theanets.main:--tied_weights = False
I 2015-09-16 01:27:40 theanets.main:197 --tied_weights = False
INFO:theanets.main:--train_batches = None
I 2015-09-16 01:27:40 theanets.main:197 --train_batches = None
INFO:theanets.main:--valid_batches = None
I 2015-09-16 01:27:40 theanets.main:197 --valid_batches = None
INFO:theanets.main:--validate_every = 10
I 2015-09-16 01:27:40 theanets.main:197 --validate_every = 10
INFO:theanets.main:--weight_l1 = 0
I 2015-09-16 01:27:40 theanets.main:197 --weight_l1 = 0
INFO:theanets.main:--weight_l2 = 0
I 2015-09-16 01:27:40 theanets.main:197 --weight_l2 = 0
INFO:theanets.trainer:compiling evaluation function
I 2015-09-16 01:27:40 theanets.trainer:129 compiling evaluation function
INFO:theanets.trainer:compiling RmsProp learning function
I 2015-09-16 01:27:41 theanets.trainer:296 compiling RmsProp learning function
INFO:theanets.trainer:validation 0 loss=8.420953 err=5.80 hid1<0.1=0.9 hid1<0.9=99.7 out<0.1=100.0 out<0.9=100.0 acc=0.20 *
I 2015-09-16 01:27:54 theanets.trainer:169 validation 0 loss=8.420953 err=5.80 hid1<0.1=0.9 hid1<0.9=99.7 out<0.1=100.0 out<0.9=100.0 acc=0.20 *
INFO:theanets.trainer:RmsProp 1 loss=5.708115 err=5.26 hid1<0.1=75.1 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.20
I 2015-09-16 01:27:55 theanets.trainer:169 RmsProp 1 loss=5.708115 err=5.26 hid1<0.1=75.1 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.20
INFO:theanets.trainer:RmsProp 2 loss=5.221023 err=5.14 hid1<0.1=98.2 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.39
I 2015-09-16 01:27:55 theanets.trainer:169 RmsProp 2 loss=5.221023 err=5.14 hid1<0.1=98.2 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.39
INFO:theanets.trainer:RmsProp 3 loss=5.159905 err=5.11 hid1<0.1=99.1 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.42
I 2015-09-16 01:27:55 theanets.trainer:169 RmsProp 3 loss=5.159905 err=5.11 hid1<0.1=99.1 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.42
INFO:theanets.trainer:RmsProp 4 loss=5.141227 err=5.11 hid1<0.1=99.4 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.52
I 2015-09-16 01:27:55 theanets.trainer:169 RmsProp 4 loss=5.141227 err=5.11 hid1<0.1=99.4 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.52
INFO:theanets.trainer:RmsProp 5 loss=5.132797 err=5.10 hid1<0.1=99.5 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.38
I 2015-09-16 01:27:56 theanets.trainer:169 RmsProp 5 loss=5.132797 err=5.10 hid1<0.1=99.5 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.38
INFO:theanets.trainer:RmsProp 6 loss=5.126067 err=5.10 hid1<0.1=99.5 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.70
I 2015-09-16 01:27:56 theanets.trainer:169 RmsProp 6 loss=5.126067 err=5.10 hid1<0.1=99.5 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=1.70
INFO:theanets.trainer:RmsProp 7 loss=5.108299 err=5.08 hid1<0.1=99.3 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=2.31
I 2015-09-16 01:27:57 theanets.trainer:169 RmsProp 7 loss=5.108299 err=5.08 hid1<0.1=99.3 hid1<0.9=100.0 out<0.1=100.0 out<0.9=100.0 acc=2.31
INFO:theanets.trainer:RmsProp 8 loss=5.077066 err=5.04 hid1<0.1=99.2 hid1<0.9=99.9 out<0.1=100.0 out<0.9=100.0 acc=2.52
I 2015-09-16 01:27:57 theanets.trainer:169 RmsProp 8 loss=5.077066 err=5.04 hid1<0.1=99.2 hid1<0.9=99.9 out<0.1=100.0 out<0.9=100.0 acc=2.52
INFO:theanets.trainer:RmsProp 9 loss=5.042741 err=5.01 hid1<0.1=99.2 hid1<0.9=99.9 out<0.1=100.0 out<0.9=100.0 acc=2.37
I 2015-09-16 01:27:57 theanets.trainer:169 RmsProp 9 loss=5.042741 err=5.01 hid1<0.1=99.2 hid1<0.9=99.9 out<0.1=100.0 out<0.9=100.0 acc=2.37
INFO:theanets.trainer:RmsProp 10 loss=5.013196 err=4.98 hid1<0.1=99.2 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.66
I 2015-09-16 01:27:58 theanets.trainer:169 RmsProp 10 loss=5.013196 err=4.98 hid1<0.1=99.2 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.66
INFO:theanets.trainer:validation 1 loss=5.000525 err=4.97 hid1<0.1=99.3 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.94 *
I 2015-09-16 01:27:58 theanets.trainer:169 validation 1 loss=5.000525 err=4.97 hid1<0.1=99.3 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.94 *
INFO:theanets.trainer:RmsProp 11 loss=4.990588 err=4.96 hid1<0.1=99.2 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.37
I 2015-09-16 01:27:58 theanets.trainer:169 RmsProp 11 loss=4.990588 err=4.96 hid1<0.1=99.2 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.37
INFO:theanets.trainer:RmsProp 12 loss=4.971305 err=4.94 hid1<0.1=99.2 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.77
I 2015-09-16 01:27:59 theanets.trainer:169 RmsProp 12 loss=4.971305 err=4.94 hid1<0.1=99.2 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.77
INFO:theanets.trainer:RmsProp 13 loss=4.955299 err=4.92 hid1<0.1=99.1 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.49
I 2015-09-16 01:27:59 theanets.trainer:169 RmsProp 13 loss=4.955299 err=4.92 hid1<0.1=99.1 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.49
INFO:theanets.trainer:RmsProp 14 loss=4.937040 err=4.90 hid1<0.1=98.9 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.84
I 2015-09-16 01:27:59 theanets.trainer:169 RmsProp 14 loss=4.937040 err=4.90 hid1<0.1=98.9 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.84
INFO:theanets.trainer:RmsProp 15 loss=4.912410 err=4.87 hid1<0.1=98.6 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.94
I 2015-09-16 01:27:59 theanets.trainer:169 RmsProp 15 loss=4.912410 err=4.87 hid1<0.1=98.6 hid1<0.9=99.8 out<0.1=100.0 out<0.9=100.0 acc=2.94
INFO:theanets.trainer:RmsProp 16 loss=4.883998 err=4.83 hid1<0.1=98.5 hid1<0.9=99.7 out<0.1=100.0 out<0.9=100.0 acc=3.62
I 2015-09-16 01:28:00 theanets.trainer:169 RmsProp 16 loss=4.883998 err=4.83 hid1<0.1=98.5 hid1<0.9=99.7 out<0.1=100.0 out<0.9=100.0 acc=3.62
INFO:theanets.trainer:RmsProp 17 loss=4.851658 err=4.80 hid1<0.1=98.4 hid1<0.9=99.7 out<0.1=100.0 out<0.9=100.0 acc=4.06
I 2015-09-16 01:28:00 theanets.trainer:169 RmsProp 17 loss=4.851658 err=4.80 hid1<0.1=98.4 hid1<0.9=99.7 out<0.1=100.0 out<0.9=100.0 acc=4.06
INFO:theanets.trainer:RmsProp 18 loss=4.820749 err=4.76 hid1<0.1=98.4 hid1<0.9=99.6 out<0.1=100.0 out<0.9=100.0 acc=4.19
I 2015-09-16 01:28:00 theanets.trainer:169 RmsProp 18 loss=4.820749 err=4.76 hid1<0.1=98.4 hid1<0.9=99.6 out<0.1=100.0 out<0.9=100.0 acc=4.19
INFO:theanets.trainer:RmsProp 19 loss=4.792747 err=4.73 hid1<0.1=98.4 hid1<0.9=99.5 out<0.1=100.0 out<0.9=100.0 acc=4.11
I 2015-09-16 01:28:01 theanets.trainer:169 RmsProp 19 loss=4.792747 err=4.73 hid1<0.1=98.4 hid1<0.9=99.5 out<0.1=100.0 out<0.9=100.0 acc=4.11
INFO:theanets.trainer:RmsProp 20 loss=4.768692 err=4.71 hid1<0.1=98.4 hid1<0.9=99.5 out<0.1=100.0 out<0.9=100.0 acc=4.08
I 2015-09-16 01:28:01 theanets.trainer:169 RmsProp 20 loss=4.768692 err=4.71 hid1<0.1=98.4 hid1<0.9=99.5 out<0.1=100.0 out<0.9=100.0 acc=4.08
INFO:theanets.trainer:validation 2 loss=4.773251 err=4.71 hid1<0.1=98.3 hid1<0.9=99.5 out<0.1=100.0 out<0.9=100.0 acc=4.18 *
I 2015-09-16 01:28:01 theanets.trainer:169 validation 2 loss=4.773251 err=4.71 hid1<0.1=98.3 hid1<0.9=99.5 out<0.1=100.0 out<0.9=100.0 acc=4.18 *
INFO:theanets.trainer:RmsProp 21 loss=4.744812 err=4.68 hid1<0.1=98.2 hid1<0.9=99.4 out<0.1=100.0 out<0.9=100.0 acc=4.36
I 2015-09-16 01:28:02 theanets.trainer:169 RmsProp 21 loss=4.744812 err=4.68 hid1<0.1=98.2 hid1<0.9=99.4 out<0.1=100.0 out<0.9=100.0 acc=4.36
INFO:theanets.trainer:RmsProp 22 loss=4.716956 err=4.64 hid1<0.1=97.8 hid1<0.9=99.4 out<0.1=100.0 out<0.9=100.0 acc=4.65
I 2015-09-16 01:28:02 theanets.trainer:169 RmsProp 22 loss=4.716956 err=4.64 hid1<0.1=97.8 hid1<0.9=99.4 out<0.1=100.0 out<0.9=100.0 acc=4.65
INFO:theanets.trainer:RmsProp 23 loss=4.685604 err=4.60 hid1<0.1=97.5 hid1<0.9=99.4 out<0.1=100.0 out<0.9=100.0 acc=4.88
I 2015-09-16 01:28:02 theanets.trainer:169 RmsProp 23 loss=4.685604 err=4.60 hid1<0.1=97.5 hid1<0.9=99.4 out<0.1=100.0 out<0.9=100.0 acc=4.88
INFO:theanets.trainer:RmsProp 24 loss=4.656520 err=4.57 hid1<0.1=97.4 hid1<0.9=99.3 out<0.1=100.0 out<0.9=100.0 acc=5.24
I 2015-09-16 01:28:02 theanets.trainer:169 RmsProp 24 loss=4.656520 err=4.57 hid1<0.1=97.4 hid1<0.9=99.3 out<0.1=100.0 out<0.9=100.0 acc=5.24
INFO:theanets.trainer:RmsProp 25 loss=4.630331 err=4.54 hid1<0.1=97.4 hid1<0.9=99.2 out<0.1=100.0 out<0.9=100.0 acc=5.42
I 2015-09-16 01:28:03 theanets.trainer:169 RmsProp 25 loss=4.630331 err=4.54 hid1<0.1=97.4 hid1<0.9=99.2 out<0.1=100.0 out<0.9=100.0 acc=5.42
INFO:theanets.trainer:RmsProp 26 loss=4.609091 err=4.52 hid1<0.1=97.3 hid1<0.9=99.2 out<0.1=100.0 out<0.9=100.0 acc=5.85
I 2015-09-16 01:28:03 theanets.trainer:169 RmsProp 26 loss=4.609091 err=4.52 hid1<0.1=97.3 hid1<0.9=99.2 out<0.1=100.0 out<0.9=100.0 acc=5.85
INFO:theanets.trainer:RmsProp 27 loss=4.591471 err=4.50 hid1<0.1=97.2 hid1<0.9=99.1 out<0.1=100.0 out<0.9=100.0 acc=5.59
I 2015-09-16 01:28:03 theanets.trainer:169 RmsProp 27 loss=4.591471 err=4.50 hid1<0.1=97.2 hid1<0.9=99.1 out<0.1=100.0 out<0.9=100.0 acc=5.59
INFO:theanets.trainer:RmsProp 28 loss=4.575965 err=4.48 hid1<0.1=97.1 hid1<0.9=99.1 out<0.1=100.0 out<0.9=100.0 acc=5.81
I 2015-09-16 01:28:04 theanets.trainer:169 RmsProp 28 loss=4.575965 err=4.48 hid1<0.1=97.1 hid1<0.9=99.1 out<0.1=100.0 out<0.9=100.0 acc=5.81
INFO:theanets.trainer:RmsProp 29 loss=4.563416 err=4.47 hid1<0.1=97.1 hid1<0.9=99.1 out<0.1=100.0 out<0.9=100.0 acc=5.48
I 2015-09-16 01:28:04 theanets.trainer:169 RmsProp 29 loss=4.563416 err=4.47 hid1<0.1=97.1 hid1<0.9=99.1 out<0.1=100.0 out<0.9=100.0 acc=5.48
INFO:theanets.trainer:RmsProp 30 loss=4.554222 err=4.46 hid1<0.1=97.1 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.93
I 2015-09-16 01:28:04 theanets.trainer:169 RmsProp 30 loss=4.554222 err=4.46 hid1<0.1=97.1 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.93
INFO:theanets.trainer:validation 3 loss=4.562751 err=4.46 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.93 *
I 2015-09-16 01:28:04 theanets.trainer:169 validation 3 loss=4.562751 err=4.46 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.93 *
INFO:theanets.trainer:RmsProp 31 loss=4.544036 err=4.45 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.74
I 2015-09-16 01:28:04 theanets.trainer:169 RmsProp 31 loss=4.544036 err=4.45 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.74
INFO:theanets.trainer:RmsProp 32 loss=4.536437 err=4.44 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.06
I 2015-09-16 01:28:05 theanets.trainer:169 RmsProp 32 loss=4.536437 err=4.44 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.06
INFO:theanets.trainer:RmsProp 33 loss=4.529445 err=4.43 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.09
I 2015-09-16 01:28:05 theanets.trainer:169 RmsProp 33 loss=4.529445 err=4.43 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.09
INFO:theanets.trainer:RmsProp 34 loss=4.521784 err=4.42 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.81
I 2015-09-16 01:28:05 theanets.trainer:169 RmsProp 34 loss=4.521784 err=4.42 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.81
INFO:theanets.trainer:RmsProp 35 loss=4.517204 err=4.42 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.03
I 2015-09-16 01:28:06 theanets.trainer:169 RmsProp 35 loss=4.517204 err=4.42 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.03
INFO:theanets.trainer:RmsProp 36 loss=4.513229 err=4.41 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.94
I 2015-09-16 01:28:06 theanets.trainer:169 RmsProp 36 loss=4.513229 err=4.41 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=5.94
INFO:theanets.trainer:RmsProp 37 loss=4.508374 err=4.41 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.33
I 2015-09-16 01:28:06 theanets.trainer:169 RmsProp 37 loss=4.508374 err=4.41 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.33
INFO:theanets.trainer:RmsProp 38 loss=4.504381 err=4.40 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.16
I 2015-09-16 01:28:07 theanets.trainer:169 RmsProp 38 loss=4.504381 err=4.40 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.16
INFO:theanets.trainer:RmsProp 39 loss=4.500857 err=4.40 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.24
I 2015-09-16 01:28:07 theanets.trainer:169 RmsProp 39 loss=4.500857 err=4.40 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.24
INFO:theanets.trainer:RmsProp 40 loss=4.497306 err=4.40 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.17
I 2015-09-16 01:28:07 theanets.trainer:169 RmsProp 40 loss=4.497306 err=4.40 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.17
INFO:theanets.trainer:validation 4 loss=4.521465 err=4.42 hid1<0.1=97.0 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=6.94
I 2015-09-16 01:28:07 theanets.trainer:169 validation 4 loss=4.521465 err=4.42 hid1<0.1=97.0 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=6.94
INFO:theanets.trainer:RmsProp 41 loss=4.494581 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.25
I 2015-09-16 01:28:08 theanets.trainer:169 RmsProp 41 loss=4.494581 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.25
INFO:theanets.trainer:RmsProp 42 loss=4.491361 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.37
I 2015-09-16 01:28:08 theanets.trainer:169 RmsProp 42 loss=4.491361 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.37
INFO:theanets.trainer:RmsProp 43 loss=4.490266 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.27
I 2015-09-16 01:28:09 theanets.trainer:169 RmsProp 43 loss=4.490266 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.27
INFO:theanets.trainer:RmsProp 44 loss=4.487065 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.17
I 2015-09-16 01:28:09 theanets.trainer:169 RmsProp 44 loss=4.487065 err=4.39 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.17
INFO:theanets.trainer:RmsProp 45 loss=4.484522 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.47
I 2015-09-16 01:28:09 theanets.trainer:169 RmsProp 45 loss=4.484522 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.47
INFO:theanets.trainer:RmsProp 46 loss=4.482367 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.25
I 2015-09-16 01:28:10 theanets.trainer:169 RmsProp 46 loss=4.482367 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.25
INFO:theanets.trainer:RmsProp 47 loss=4.479771 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=5.96
I 2015-09-16 01:28:10 theanets.trainer:169 RmsProp 47 loss=4.479771 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=5.96
INFO:theanets.trainer:RmsProp 48 loss=4.478754 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.43
I 2015-09-16 01:28:10 theanets.trainer:169 RmsProp 48 loss=4.478754 err=4.38 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.43
INFO:theanets.trainer:RmsProp 49 loss=4.475881 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.59
I 2015-09-16 01:28:11 theanets.trainer:169 RmsProp 49 loss=4.475881 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.59
INFO:theanets.trainer:RmsProp 50 loss=4.475798 err=4.37 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.39
I 2015-09-16 01:28:11 theanets.trainer:169 RmsProp 50 loss=4.475798 err=4.37 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.39
INFO:theanets.trainer:validation 5 loss=4.506948 err=4.40 hid1<0.1=97.0 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=6.13 *
I 2015-09-16 01:28:11 theanets.trainer:169 validation 5 loss=4.506948 err=4.40 hid1<0.1=97.0 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=6.13 *
INFO:theanets.trainer:RmsProp 51 loss=4.473212 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.30
I 2015-09-16 01:28:11 theanets.trainer:169 RmsProp 51 loss=4.473212 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.30
INFO:theanets.trainer:RmsProp 52 loss=4.471423 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.23
I 2015-09-16 01:28:12 theanets.trainer:169 RmsProp 52 loss=4.471423 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.23
INFO:theanets.trainer:RmsProp 53 loss=4.469165 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.34
I 2015-09-16 01:28:12 theanets.trainer:169 RmsProp 53 loss=4.469165 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.34
INFO:theanets.trainer:RmsProp 54 loss=4.469367 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.35
I 2015-09-16 01:28:13 theanets.trainer:169 RmsProp 54 loss=4.469367 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.35
INFO:theanets.trainer:RmsProp 55 loss=4.467570 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
I 2015-09-16 01:28:13 theanets.trainer:169 RmsProp 55 loss=4.467570 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
INFO:theanets.trainer:RmsProp 56 loss=4.465740 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.45
I 2015-09-16 01:28:13 theanets.trainer:169 RmsProp 56 loss=4.465740 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.45
INFO:theanets.trainer:RmsProp 57 loss=4.464414 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.30
I 2015-09-16 01:28:14 theanets.trainer:169 RmsProp 57 loss=4.464414 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=100.0 out<0.9=100.0 acc=6.30
INFO:theanets.trainer:RmsProp 58 loss=4.463217 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.24
I 2015-09-16 01:28:14 theanets.trainer:169 RmsProp 58 loss=4.463217 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.24
INFO:theanets.trainer:RmsProp 59 loss=4.462979 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.39
I 2015-09-16 01:28:14 theanets.trainer:169 RmsProp 59 loss=4.462979 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.39
INFO:theanets.trainer:RmsProp 60 loss=4.461901 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
I 2015-09-16 01:28:15 theanets.trainer:169 RmsProp 60 loss=4.461901 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
INFO:theanets.trainer:validation 6 loss=4.494724 err=4.39 hid1<0.1=97.0 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=7.28
I 2015-09-16 01:28:15 theanets.trainer:169 validation 6 loss=4.494724 err=4.39 hid1<0.1=97.0 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=7.28
INFO:theanets.trainer:RmsProp 61 loss=4.460941 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.49
I 2015-09-16 01:28:15 theanets.trainer:169 RmsProp 61 loss=4.460941 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.49
INFO:theanets.trainer:RmsProp 62 loss=4.458754 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.45
I 2015-09-16 01:28:15 theanets.trainer:169 RmsProp 62 loss=4.458754 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.45
INFO:theanets.trainer:RmsProp 63 loss=4.457318 err=4.36 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.47
I 2015-09-16 01:28:16 theanets.trainer:169 RmsProp 63 loss=4.457318 err=4.36 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.47
INFO:theanets.trainer:RmsProp 64 loss=4.457949 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.22
I 2015-09-16 01:28:16 theanets.trainer:169 RmsProp 64 loss=4.457949 err=4.36 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.22
INFO:theanets.trainer:RmsProp 65 loss=4.456310 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.18
I 2015-09-16 01:28:16 theanets.trainer:169 RmsProp 65 loss=4.456310 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.18
INFO:theanets.trainer:RmsProp 66 loss=4.455160 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.36
I 2015-09-16 01:28:16 theanets.trainer:169 RmsProp 66 loss=4.455160 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.36
INFO:theanets.trainer:RmsProp 67 loss=4.455313 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
I 2015-09-16 01:28:17 theanets.trainer:169 RmsProp 67 loss=4.455313 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
INFO:theanets.trainer:RmsProp 68 loss=4.452290 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.71
I 2015-09-16 01:28:17 theanets.trainer:169 RmsProp 68 loss=4.452290 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.71
INFO:theanets.trainer:RmsProp 69 loss=4.453828 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.05
I 2015-09-16 01:28:17 theanets.trainer:169 RmsProp 69 loss=4.453828 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.05
INFO:theanets.trainer:RmsProp 70 loss=4.451632 err=4.35 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.37
I 2015-09-16 01:28:18 theanets.trainer:169 RmsProp 70 loss=4.451632 err=4.35 hid1<0.1=97.0 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.37
INFO:theanets.trainer:validation 7 loss=4.491538 err=4.39 hid1<0.1=96.9 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=6.88
I 2015-09-16 01:28:18 theanets.trainer:169 validation 7 loss=4.491538 err=4.39 hid1<0.1=96.9 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=6.88
INFO:theanets.trainer:RmsProp 71 loss=4.452420 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.54
I 2015-09-16 01:28:18 theanets.trainer:169 RmsProp 71 loss=4.452420 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.54
INFO:theanets.trainer:RmsProp 72 loss=4.450191 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
I 2015-09-16 01:28:18 theanets.trainer:169 RmsProp 72 loss=4.450191 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.42
INFO:theanets.trainer:RmsProp 73 loss=4.448815 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.49
I 2015-09-16 01:28:19 theanets.trainer:169 RmsProp 73 loss=4.448815 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.49
INFO:theanets.trainer:RmsProp 74 loss=4.448070 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.78
I 2015-09-16 01:28:19 theanets.trainer:169 RmsProp 74 loss=4.448070 err=4.35 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.78
INFO:theanets.trainer:RmsProp 75 loss=4.447124 err=4.34 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.70
I 2015-09-16 01:28:19 theanets.trainer:169 RmsProp 75 loss=4.447124 err=4.34 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.70
INFO:theanets.trainer:RmsProp 76 loss=4.444724 err=4.34 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.65
I 2015-09-16 01:28:19 theanets.trainer:169 RmsProp 76 loss=4.444724 err=4.34 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.65
INFO:theanets.trainer:RmsProp 77 loss=4.443238 err=4.34 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.56
I 2015-09-16 01:28:20 theanets.trainer:169 RmsProp 77 loss=4.443238 err=4.34 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.56
INFO:theanets.trainer:RmsProp 78 loss=4.441787 err=4.34 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.65
I 2015-09-16 01:28:20 theanets.trainer:169 RmsProp 78 loss=4.441787 err=4.34 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.65
INFO:theanets.trainer:RmsProp 79 loss=4.440745 err=4.34 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.95
I 2015-09-16 01:28:20 theanets.trainer:169 RmsProp 79 loss=4.440745 err=4.34 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.95
INFO:theanets.trainer:RmsProp 80 loss=4.438402 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.91
I 2015-09-16 01:28:20 theanets.trainer:169 RmsProp 80 loss=4.438402 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.91
INFO:theanets.trainer:validation 8 loss=4.477842 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.44
I 2015-09-16 01:28:21 theanets.trainer:169 validation 8 loss=4.477842 err=4.37 hid1<0.1=96.9 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=6.44
INFO:theanets.trainer:RmsProp 81 loss=4.437894 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.69
I 2015-09-16 01:28:21 theanets.trainer:169 RmsProp 81 loss=4.437894 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.69
INFO:theanets.trainer:RmsProp 82 loss=4.436222 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.59
I 2015-09-16 01:28:21 theanets.trainer:169 RmsProp 82 loss=4.436222 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.59
INFO:theanets.trainer:RmsProp 83 loss=4.434881 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.85
I 2015-09-16 01:28:21 theanets.trainer:169 RmsProp 83 loss=4.434881 err=4.33 hid1<0.1=96.9 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.85
INFO:theanets.trainer:RmsProp 84 loss=4.432049 err=4.33 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.83
I 2015-09-16 01:28:22 theanets.trainer:169 RmsProp 84 loss=4.432049 err=4.33 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.83
INFO:theanets.trainer:RmsProp 85 loss=4.431212 err=4.33 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.79
I 2015-09-16 01:28:22 theanets.trainer:169 RmsProp 85 loss=4.431212 err=4.33 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.79
INFO:theanets.trainer:RmsProp 86 loss=4.429122 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.07
I 2015-09-16 01:28:22 theanets.trainer:169 RmsProp 86 loss=4.429122 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.07
INFO:theanets.trainer:RmsProp 87 loss=4.427561 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.69
I 2015-09-16 01:28:23 theanets.trainer:169 RmsProp 87 loss=4.427561 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.69
INFO:theanets.trainer:RmsProp 88 loss=4.427006 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.78
I 2015-09-16 01:28:23 theanets.trainer:169 RmsProp 88 loss=4.427006 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.78
INFO:theanets.trainer:RmsProp 89 loss=4.422358 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.84
I 2015-09-16 01:28:23 theanets.trainer:169 RmsProp 89 loss=4.422358 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.84
INFO:theanets.trainer:RmsProp 90 loss=4.421309 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.69
I 2015-09-16 01:28:24 theanets.trainer:169 RmsProp 90 loss=4.421309 err=4.32 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.69
INFO:theanets.trainer:validation 9 loss=4.464902 err=4.36 hid1<0.1=96.8 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.15
I 2015-09-16 01:28:24 theanets.trainer:169 validation 9 loss=4.464902 err=4.36 hid1<0.1=96.8 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.15
INFO:theanets.trainer:RmsProp 91 loss=4.420060 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.03
I 2015-09-16 01:28:24 theanets.trainer:169 RmsProp 91 loss=4.420060 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.03
INFO:theanets.trainer:RmsProp 92 loss=4.417624 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.10
I 2015-09-16 01:28:25 theanets.trainer:169 RmsProp 92 loss=4.417624 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.10
INFO:theanets.trainer:RmsProp 93 loss=4.414837 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.83
I 2015-09-16 01:28:25 theanets.trainer:169 RmsProp 93 loss=4.414837 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.83
INFO:theanets.trainer:RmsProp 94 loss=4.413218 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.05
I 2015-09-16 01:28:25 theanets.trainer:169 RmsProp 94 loss=4.413218 err=4.31 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.05
INFO:theanets.trainer:RmsProp 95 loss=4.411298 err=4.30 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.99
I 2015-09-16 01:28:25 theanets.trainer:169 RmsProp 95 loss=4.411298 err=4.30 hid1<0.1=96.8 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.99
INFO:theanets.trainer:RmsProp 96 loss=4.408932 err=4.30 hid1<0.1=96.7 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.86
I 2015-09-16 01:28:26 theanets.trainer:169 RmsProp 96 loss=4.408932 err=4.30 hid1<0.1=96.7 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=6.86
INFO:theanets.trainer:RmsProp 97 loss=4.405703 err=4.30 hid1<0.1=96.6 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.18
I 2015-09-16 01:28:26 theanets.trainer:169 RmsProp 97 loss=4.405703 err=4.30 hid1<0.1=96.6 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.18
INFO:theanets.trainer:RmsProp 98 loss=4.403493 err=4.29 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.20
I 2015-09-16 01:28:26 theanets.trainer:169 RmsProp 98 loss=4.403493 err=4.29 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.20
INFO:theanets.trainer:RmsProp 99 loss=4.399036 err=4.29 hid1<0.1=96.6 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.54
I 2015-09-16 01:28:27 theanets.trainer:169 RmsProp 99 loss=4.399036 err=4.29 hid1<0.1=96.6 hid1<0.9=99.1 out<0.1=99.9 out<0.9=100.0 acc=7.54
INFO:theanets.trainer:RmsProp 100 loss=4.395781 err=4.29 hid1<0.1=96.5 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.21
I 2015-09-16 01:28:27 theanets.trainer:169 RmsProp 100 loss=4.395781 err=4.29 hid1<0.1=96.5 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.21
INFO:theanets.trainer:validation 10 loss=4.439670 err=4.33 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.66 *
I 2015-09-16 01:28:27 theanets.trainer:169 validation 10 loss=4.439670 err=4.33 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.66 *
INFO:theanets.trainer:RmsProp 101 loss=4.391821 err=4.28 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.34
I 2015-09-16 01:28:27 theanets.trainer:169 RmsProp 101 loss=4.391821 err=4.28 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.34
INFO:theanets.trainer:RmsProp 102 loss=4.389145 err=4.28 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.60
I 2015-09-16 01:28:28 theanets.trainer:169 RmsProp 102 loss=4.389145 err=4.28 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.60
INFO:theanets.trainer:RmsProp 103 loss=4.386158 err=4.28 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.52
I 2015-09-16 01:28:28 theanets.trainer:169 RmsProp 103 loss=4.386158 err=4.28 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.52
INFO:theanets.trainer:RmsProp 104 loss=4.383983 err=4.27 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.77
I 2015-09-16 01:28:28 theanets.trainer:169 RmsProp 104 loss=4.383983 err=4.27 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.77
INFO:theanets.trainer:RmsProp 105 loss=4.380274 err=4.27 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.43
I 2015-09-16 01:28:29 theanets.trainer:169 RmsProp 105 loss=4.380274 err=4.27 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.43
INFO:theanets.trainer:RmsProp 106 loss=4.377556 err=4.27 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.94
I 2015-09-16 01:28:29 theanets.trainer:169 RmsProp 106 loss=4.377556 err=4.27 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.94
INFO:theanets.trainer:RmsProp 107 loss=4.375278 err=4.26 hid1<0.1=96.6 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.57
...
INFO:theanets.trainer:validation 11 loss=4.419380 err=4.30 hid1<0.1=96.5 hid1<0.9=99.0 out<0.1=99.9 out<0.9=100.0 acc=7.93
INFO:theanets.trainer:validation 12 loss=4.398981 err=4.28 hid1<0.1=96.4 hid1<0.9=98.9 out<0.1=99.9 out<0.9=100.0 acc=8.47
INFO:theanets.trainer:validation 13 loss=4.388001 err=4.27 hid1<0.1=96.4 hid1<0.9=98.8 out<0.1=99.9 out<0.9=100.0 acc=8.13 *
INFO:theanets.trainer:validation 14 loss=4.372751 err=4.25 hid1<0.1=96.2 hid1<0.9=98.8 out<0.1=99.8 out<0.9=100.0 acc=8.54
INFO:theanets.trainer:validation 15 loss=4.369592 err=4.25 hid1<0.1=96.2 hid1<0.9=98.7 out<0.1=99.8 out<0.9=100.0 acc=8.44
INFO:theanets.trainer:validation 16 loss=4.363050 err=4.23 hid1<0.1=96.2 hid1<0.9=98.7 out<0.1=99.8 out<0.9=100.0 acc=8.30
INFO:theanets.trainer:validation 17 loss=4.351898 err=4.22 hid1<0.1=96.1 hid1<0.9=98.7 out<0.1=99.8 out<0.9=100.0 acc=8.41
INFO:theanets.trainer:validation 18 loss=4.343259 err=4.21 hid1<0.1=96.0 hid1<0.9=98.7 out<0.1=99.8 out<0.9=100.0 acc=9.02 *
INFO:theanets.trainer:validation 19 loss=4.337478 err=4.20 hid1<0.1=95.9 hid1<0.9=98.6 out<0.1=99.8 out<0.9=100.0 acc=8.79
INFO:theanets.trainer:validation 20 loss=4.328119 err=4.19 hid1<0.1=95.8 hid1<0.9=98.6 out<0.1=99.8 out<0.9=100.0 acc=9.02
INFO:theanets.trainer:validation 21 loss=4.315344 err=4.18 hid1<0.1=95.7 hid1<0.9=98.5 out<0.1=99.8 out<0.9=100.0 acc=9.39
INFO:theanets.trainer:validation 22 loss=4.299394 err=4.16 hid1<0.1=95.7 hid1<0.9=98.5 out<0.1=99.8 out<0.9=100.0 acc=9.78 *
INFO:theanets.trainer:validation 23 loss=4.295299 err=4.15 hid1<0.1=95.6 hid1<0.9=98.5 out<0.1=99.8 out<0.9=100.0 acc=10.30
INFO:theanets.trainer:validation 24 loss=4.285622 err=4.14 hid1<0.1=95.5 hid1<0.9=98.4 out<0.1=99.8 out<0.9=100.0 acc=10.59
INFO:theanets.trainer:validation 25 loss=4.270052 err=4.12 hid1<0.1=95.5 hid1<0.9=98.4 out<0.1=99.8 out<0.9=100.0 acc=10.76
INFO:theanets.trainer:validation 26 loss=4.274318 err=4.12 hid1<0.1=95.4 hid1<0.9=98.3 out<0.1=99.7 out<0.9=100.0 acc=10.75
INFO:theanets.trainer:validation 27 loss=4.264519 err=4.11 hid1<0.1=95.3 hid1<0.9=98.3 out<0.1=99.8 out<0.9=100.0 acc=11.36
INFO:theanets.trainer:patience elapsed!
Out[285]:
(OrderedDict([('loss', 4.1777453007905381), ('err', 4.0277878788934238), ('hid1<0.1', 95.375), ('hid1<0.9', 98.401494565217391), ('out<0.1', 99.769776570048336), ('out<0.9', 100.0), ('acc', 10.450634057971014)]),
OrderedDict([('loss', 4.2743176740148794), ('err', 4.1196194368859995), ('hid1<0.1', 95.389760375494092), ('hid1<0.9', 98.324357707509876), ('out<0.1', 99.722393774703562), ('out<0.9', 100.0), ('acc', 10.746047430830039)]))
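The trainer's monitor output above is easier to judge as a plot than as raw text. The cell below is a small added sketch, not part of the original run: it assumes the trainer log has been captured into a string called log_text (a hypothetical name, e.g. via a logging handler) and relies on the fact that in the log above a validation checkpoint is made every 10 RmsProp iterations. It parses the lines into a DataFrame and plots the loss and accuracy curves.
import re
import pandas as pd
import matplotlib.pyplot as plt

def parse_monitor_log(log_text):
    """Parse trainer lines like
    'INFO:theanets.trainer:RmsProp 107 loss=4.375278 err=4.26 ... acc=7.57'
    into a tidy DataFrame. `log_text` is assumed to hold the captured log."""
    pattern = re.compile(
        r'INFO:theanets\.trainer:(RmsProp|validation) (\d+) '
        r'loss=([\d.]+) err=([\d.]+).*acc=([\d.]+)')
    rows = [dict(kind=k, step=int(s), loss=float(l), err=float(e), acc=float(a))
            for (k, s, l, e, a) in pattern.findall(log_text)]
    return pd.DataFrame(rows)

monitors = parse_monitor_log(log_text)  # log_text: hypothetical captured log
train = monitors[monitors['kind'] == 'RmsProp']
valid = monitors[monitors['kind'] == 'validation']

fig, (ax_loss, ax_acc) = plt.subplots(1, 2)
ax_loss.plot(train['step'], train['loss'], label='train')
# in the log above, one validation checkpoint per 10 RmsProp iterations
ax_loss.plot(valid['step'] * 10, valid['loss'], label='validation')
ax_loss.set_xlabel('iteration'); ax_loss.set_ylabel('loss'); ax_loss.legend()
ax_acc.plot(train['step'], train['acc'], label='train')
ax_acc.plot(valid['step'] * 10, valid['acc'], label='validation')
ax_acc.set_xlabel('iteration'); ax_acc.set_ylabel('accuracy [%]'); ax_acc.legend();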
In [286]:
y_pred_nn = exp.network.classify(X_valid)
y_pred_nn
Out[286]:
array([102, 71, 59, ..., 155, 77, 120])
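As a quick sanity check (an addition, not in the original notebook), the overall accuracy of these predictions can be computed directly; it should land near the ~11 % reported by the trainer's acc monitor, and comparing it with a majority-class baseline (roughly 2 %, judging from the largest support of 65 out of 2924 in the report below) shows the model does learn something, just not much.
import numpy as np
from sklearn.metrics import accuracy_score

# Overall accuracy of the predictions above.
print('accuracy:', accuracy_score(y_valid, y_pred_nn))
# Trivial majority-class baseline for comparison (class indices are 0..179).
print('majority-class baseline:', np.bincount(y_valid).max() / len(y_valid))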
In [287]:
print(classification_report(y_valid, y_pred_nn))
precision recall f1-score support
0 0.00 0.00 0.00 11
1 0.00 0.00 0.00 6
2 0.04 0.22 0.07 9
3 0.00 0.00 0.00 5
4 0.00 0.00 0.00 14
5 0.00 0.00 0.00 23
6 0.00 0.00 0.00 17
7 0.00 0.00 0.00 16
8 0.00 0.00 0.00 16
9 0.00 0.00 0.00 8
10 0.00 0.00 0.00 27
11 0.00 0.00 0.00 17
12 0.00 0.00 0.00 11
13 0.00 0.00 0.00 16
14 0.00 0.00 0.00 14
15 0.00 0.00 0.00 6
16 0.00 0.00 0.00 12
17 0.00 0.00 0.00 19
18 0.00 0.00 0.00 8
19 0.00 0.00 0.00 15
20 0.00 0.00 0.00 9
21 0.00 0.00 0.00 19
22 0.00 0.00 0.00 16
23 0.00 0.00 0.00 11
24 0.00 0.00 0.00 6
25 0.00 0.00 0.00 18
26 0.14 0.30 0.19 20
27 0.00 0.00 0.00 17
28 0.00 0.00 0.00 16
29 0.00 0.00 0.00 28
30 0.00 0.00 0.00 12
31 0.25 0.11 0.15 18
32 0.00 0.00 0.00 15
33 0.00 0.00 0.00 19
34 0.00 0.00 0.00 11
35 0.00 0.00 0.00 14
36 0.00 0.00 0.00 11
37 0.00 0.00 0.00 17
38 0.00 0.00 0.00 14
39 0.00 0.00 0.00 13
40 0.00 0.00 0.00 14
41 0.00 0.00 0.00 12
42 0.14 0.23 0.18 22
43 0.00 0.00 0.00 17
44 0.00 0.00 0.00 6
45 0.00 0.00 0.00 13
46 0.39 0.73 0.51 15
47 0.00 0.00 0.00 14
48 0.00 0.00 0.00 23
49 0.17 0.14 0.15 21
50 0.00 0.00 0.00 13
51 0.00 0.00 0.00 18
52 0.00 0.00 0.00 15
53 0.00 0.00 0.00 10
54 0.00 0.00 0.00 9
55 0.00 0.00 0.00 10
56 0.00 0.00 0.00 22
57 0.00 0.00 0.00 22
58 0.32 0.31 0.31 26
59 0.17 0.50 0.26 16
60 0.00 0.00 0.00 15
61 0.00 0.00 0.00 6
62 0.00 0.00 0.00 13
63 0.00 0.00 0.00 16
64 0.00 0.00 0.00 23
65 0.00 0.00 0.00 26
66 0.00 0.00 0.00 16
67 0.00 0.00 0.00 17
68 0.00 0.00 0.00 13
69 0.00 0.00 0.00 11
70 0.00 0.00 0.00 6
71 0.06 0.50 0.10 22
72 0.30 0.27 0.29 11
73 0.00 0.00 0.00 14
74 0.00 0.00 0.00 16
75 0.00 0.00 0.00 15
76 0.00 0.00 0.00 14
77 0.09 0.09 0.09 11
78 0.43 0.69 0.53 29
79 0.00 0.00 0.00 21
80 0.05 0.10 0.07 21
81 0.00 0.00 0.00 4
82 0.00 0.00 0.00 11
83 0.00 0.00 0.00 5
84 0.00 0.00 0.00 10
85 0.05 0.04 0.04 25
86 0.00 0.00 0.00 7
87 0.00 0.00 0.00 33
88 0.19 0.30 0.23 27
89 0.00 0.00 0.00 17
90 0.00 0.00 0.00 14
91 0.00 0.00 0.00 11
92 0.05 0.11 0.07 9
93 0.00 0.00 0.00 7
94 0.00 0.00 0.00 15
95 0.00 0.00 0.00 18
96 0.00 0.00 0.00 9
97 0.00 0.00 0.00 9
98 0.00 0.00 0.00 25
99 0.00 0.00 0.00 24
100 0.00 0.00 0.00 17
101 0.00 0.00 0.00 17
102 0.01 0.13 0.02 15
103 0.00 0.00 0.00 25
104 0.00 0.00 0.00 2
105 0.00 0.00 0.00 17
106 0.00 0.00 0.00 15
107 0.00 0.00 0.00 31
108 0.00 0.00 0.00 15
109 0.00 0.00 0.00 20
110 0.00 0.00 0.00 8
111 0.08 0.17 0.11 18
112 0.00 0.00 0.00 7
113 0.00 0.00 0.00 4
114 0.00 0.00 0.00 10
115 0.00 0.00 0.00 25
116 0.06 0.21 0.10 24
117 0.00 0.00 0.00 23
118 0.29 0.38 0.33 37
119 0.00 0.00 0.00 19
120 0.14 0.31 0.19 29
121 0.00 0.00 0.00 20
122 0.12 0.33 0.18 30
123 0.00 0.00 0.00 15
124 0.11 0.40 0.17 20
125 0.00 0.00 0.00 4
126 0.00 0.00 0.00 28
127 0.01 0.04 0.02 28
128 0.00 0.00 0.00 20
129 0.14 0.18 0.16 17
130 0.00 0.00 0.00 11
131 0.03 0.10 0.04 31
132 0.11 0.21 0.15 14
133 0.00 0.00 0.00 23
134 0.00 0.00 0.00 11
135 0.00 0.00 0.00 4
136 0.04 0.10 0.06 10
137 0.00 0.00 0.00 26
138 0.00 0.00 0.00 9
139 0.00 0.00 0.00 12
140 0.05 0.23 0.08 22
141 0.45 0.31 0.37 29
142 0.00 0.00 0.00 25
143 0.00 0.00 0.00 7
144 0.09 0.41 0.14 17
145 0.03 0.25 0.06 12
146 0.00 0.00 0.00 19
147 0.00 0.00 0.00 16
148 0.00 0.00 0.00 18
149 0.50 0.12 0.20 16
150 0.04 0.22 0.06 23
151 0.00 0.00 0.00 10
152 0.00 0.00 0.00 17
153 0.13 0.64 0.22 44
154 0.00 0.00 0.00 16
155 0.07 0.38 0.12 21
156 0.23 0.45 0.31 65
157 0.00 0.00 0.00 22
158 0.00 0.00 0.00 12
159 0.00 0.00 0.00 30
160 0.00 0.00 0.00 5
161 0.00 0.00 0.00 7
162 0.00 0.00 0.00 22
163 0.00 0.00 0.00 12
164 0.00 0.00 0.00 5
165 0.00 0.00 0.00 8
166 0.00 0.00 0.00 15
167 0.00 0.00 0.00 6
168 0.00 0.00 0.00 16
169 0.00 0.00 0.00 19
170 0.50 0.08 0.14 12
171 0.50 0.14 0.22 14
172 0.00 0.00 0.00 8
173 0.05 0.46 0.09 28
174 0.00 0.00 0.00 3
175 0.14 0.25 0.18 32
176 0.00 0.00 0.00 9
177 0.16 0.52 0.24 23
178 0.00 0.00 0.00 5
179 0.00 0.00 0.00 15
avg / total 0.05 0.10 0.06 2924
/Users/bzamecnik/Documents/dev/pyvenv/py3.4/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
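Almost every row in the report is 0.00/0.00, and the UndefinedMetricWarning says the same thing: for most of the 180 classes the network never predicts a single sample, so precision is undefined and gets clamped to zero. A short added check (assuming y_pred_nn and y_valid from the cells above) shows how few distinct labels the network actually uses and which ones it falls back on most often.
import numpy as np
import pandas as pd

# How many of the 180 labels does the network ever predict, and which ones
# does it predict most often?
predicted_counts = pd.Series(y_pred_nn).value_counts()
print('distinct labels predicted:', len(predicted_counts),
      'out of', len(np.unique(y_valid)), 'present in the validation set')
predicted_counts.head(10)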
In [288]:
matshow(confusion_matrix(y_valid, y_pred_nn), cmap=cm.Spectral_r)
colorbar();
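With 180 classes and uneven support, the raw-count confusion matrix above is dominated by the largest classes. A possible follow-up (again an addition, not part of the original notebook) is to row-normalise it so that every true class sums to one, which makes the weak diagonal easier to see.
import numpy as np

# Row-normalised confusion matrix: each true class sums to 1, so large classes
# no longer dominate the colour scale (the maximum() guard avoids division by zero).
cm_counts = confusion_matrix(y_valid, y_pred_nn)
cm_norm = cm_counts / np.maximum(cm_counts.sum(axis=1, keepdims=True), 1)
matshow(cm_norm, cmap=cm.Spectral_r)
colorbar();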
In [ ]:
In [ ]:
Content source: bzamecnik/ml-playground