In [1]:
# Imports and notebook setup (Python 2 kernel: `reload` builtin and
# `print` statements are used in later cells).
from scipy.io import loadmat
import plotly.offline as py
import plotly.graph_objs as go
from plotly import tools
import numpy as np
import tensorbuilder
from tensorbuilder import tensorbuilder as tb
import tensorflow as tf
from sklearn.preprocessing import OneHotEncoder
from datetime import datetime

# Re-import tensorbuilder so local edits to the library are picked up
# without restarting the kernel (Python 2 builtin `reload`).
reload(tensorbuilder)

# Enable offline/inline plotly rendering for this notebook.
py.init_notebook_mode()


/usr/local/lib/python2.7/dist-packages/requests/packages/urllib3/util/ssl_.py:318: SNIMissingWarning: An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. This may cause the server to present an incorrect TLS certificate, which can cause validation failures. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/security.html#snimissingwarning.
  SNIMissingWarning
/usr/local/lib/python2.7/dist-packages/requests/packages/urllib3/util/ssl_.py:122: InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/security.html#insecureplatformwarning.
  InsecurePlatformWarning

In [2]:
# Load a 5000-sample 20x20 MNIST subset stored as a MATLAB file.
# 'X' is (5000, 400) flattened images, 'y' holds labels where 10
# stands for digit 0 — presumably the Coursera ML course dataset;
# TODO confirm provenance.
mat = loadmat('mnist.mat')
x_data = mat['X']
y_data = mat['y']

# Reshape the flat rows into images, then transpose and vertically flip
# each image: the pixels appear to have been serialized column-major
# (MATLAB order), so this restores row-major display orientation —
# verified visually by the heatmap cells below.
x_data = x_data.reshape([5000, 20, 20, 1])
x_data = np.transpose(x_data, [0, 2, 1, 3]) #transpose
x_data = x_data[:,::-1, :, :] #flip

# Map label 10 -> 0, then one-hot encode to a dense (5000, 10) matrix.
y_data = y_data % 10
y_data = OneHotEncoder().fit_transform(y_data).todense()

# Wrap in a tensorbuilder dataset and split 80% train / 20% test.
data = tb.data(
    x=x_data, 
    y=y_data
)

[training_data, test_data] = data.split(0.8, 0.2)

In [3]:
# Shared axis options that strip all chrome (grid, ticks, zero line,
# labels) from a subplot axis so only the image content shows.
axis_ops = {
    'autorange': True,
    'showgrid': False,
    'zeroline': False,
    'showline': False,
    'autotick': True,
    'ticks': '',
    'showticklabels': False,
}
# Base layout with hidden axes, plus a 2x5 subplot grid used below to
# show one sample per digit class.
layout = go.Layout(xaxis=axis_ops, yaxis=axis_ops)
fig = tools.make_subplots(rows=2, cols=5, print_grid=False)
def heatmap(z, smooth='best', reversescale=True):
    """Build a grayscale plotly Heatmap trace for a 2-D array `z`.

    `smooth` is forwarded as `zsmooth` ('best' interpolates pixels);
    `reversescale=True` draws high values dark on a light background.
    The color bar and legend entry are suppressed.
    """
    options = dict(
        z=z,
        colorscale='Greys',
        reversescale=reversescale,
        showscale=False,
        showlegend=False,
        zsmooth=smooth,
    )
    return go.Heatmap(**options)
def printmat(x):
    """Print a 2-D float array as one-decimal magnitudes, flipped vertically.

    The row flip undoes the display orientation used by the heatmaps so
    the text dump reads top-to-bottom like the plotted image.
    """
    magnitudes = np.abs(x[::-1, :])
    rendered = np.array2string(magnitudes, formatter={'float_kind': '{0:.1f}'.format})
    print(rendered)

# Fill the 2x5 grid with one representative image per digit class
# (the dataset is ordered by label, 500 samples per class; index
# 500*k + 250 picks the middle sample of class k), then hide every
# subplot axis and render inline.
for cell in range(10):
    grid_row, grid_col = cell // 5 + 1, cell % 5 + 1
    sample_index = 500 * cell + 250
    fig.append_trace(heatmap(x_data[sample_index][:, :, 0]), grid_row, grid_col)

for cell in range(10):
    fig['layout']['xaxis{0}'.format(cell + 1)].update(**axis_ops)
    fig['layout']['yaxis{0}'.format(cell + 1)].update(**axis_ops)

py.iplot(fig)



In [5]:
# One representative sample per digit class: the middle of each
# 500-sample block — assumes the dataset is ordered by label
# (consistent with the grid plot above); TODO confirm.
sample_indexes = [ 500 * i + 250 for i in range(10) ]

sample_features = x_data[sample_indexes]
sample_labels = y_data[sample_indexes]

# First sample (digit 0) with 2 blank columns trimmed from each side,
# leaving a 20x16 matrix compact enough for the printed dump below.
sample = sample_features[0][:, 2:-2, 0]

# Show the same sample both as a 700x700 heatmap and as text.
hmap = heatmap(sample)
hlayout = layout.copy()
hlayout.update(width=700,height=700)

py.iplot(go.Figure(data=[hmap], layout=hlayout))
printmat(sample)


[[0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.1 0.3 0.4 0.2 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.1 0.7 0.9 1.0 1.0 0.6 0.1 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.3 1.0 1.0 1.0 0.9 1.0 0.7 0.1 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.1 0.7 1.0 1.0 0.9 0.2 0.6 1.1 0.5 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.6 1.0 0.8 0.5 0.2 0.1 0.4 1.1 0.6 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.3 1.0 0.8 0.1 0.0 0.0 0.0 0.2 0.9 0.6 0.0 0.0]
 [0.0 0.0 0.0 0.1 0.8 1.0 0.4 0.0 0.0 0.0 0.0 0.1 0.8 0.6 0.0 0.0]
 [0.0 0.0 0.0 0.5 1.0 0.7 0.0 0.0 0.0 0.0 0.0 0.2 1.0 0.6 0.0 0.0]
 [0.0 0.0 0.2 0.9 1.0 0.2 0.0 0.0 0.0 0.0 0.0 0.6 1.0 0.3 0.0 0.0]
 [0.0 0.0 0.6 1.0 0.6 0.0 0.0 0.0 0.0 0.0 0.4 1.0 0.7 0.0 0.0 0.0]
 [0.0 0.0 0.6 0.9 0.2 0.0 0.0 0.0 0.0 0.3 1.0 0.9 0.2 0.0 0.0 0.0]
 [0.0 0.0 0.7 0.8 0.1 0.0 0.0 0.1 0.4 1.0 0.9 0.2 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.8 0.9 0.3 0.2 0.4 0.8 1.0 0.8 0.2 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.5 1.0 1.0 1.0 1.0 0.9 0.5 0.1 0.0 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.4 0.7 0.7 0.5 0.2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]
 [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]]

In [15]:
# Build the TF graph: placeholders, a small conv net defined via the
# tensorbuilder fluent DSL, loss/trainer, accuracy, and summary writer.
graph = tf.Graph()
with graph.as_default():
    
    # Input placeholders: 20x20 grayscale images, 10-class one-hot labels,
    # plus scalar hyperparameters fed at run time.
    with tf.name_scope('inputs'):
        x = tf.placeholder('float', shape=[None, 20, 20, 1], name='x')
        y = tf.placeholder('float', shape=[None, 10], name='y')
        learning_rate = tf.placeholder('float', name='learning_rate')
        keep_prob = tf.placeholder('float', name='keep_prob')
        
    
    #output builders
    
#     with tf.device('/gpu:0'):
    # tensorbuilder pipeline: two ELU conv+maxpool stages (tagged 'conv1'
    # and 'conv2' via .on() so their activations can be fetched later),
    # two VALID-padded convs, flatten, dropout, and a 10-unit linear
    # logits layer. The branch list then derives: softmax output 'h',
    # and (loss -> Adadelta trainer). The final list selects which tagged
    # tensors are returned, in order.
    [h, trainer, conv1, conv2, loss] = tb(
        x,
        tb
        .elu_conv2d_layer(16, 3).on('conv1') #, scope='conv2d_16_relu_3x3')
        .max_pool2d(2)

        .elu_conv2d_layer(32, 3).on('conv2') #, scope='conv2d_32_relu_3x3')
        .max_pool2d(2)
        
        .elu_conv2d_layer(64, 3, padding='VALID')
        
        .elu_conv2d_layer(128, 3, padding='VALID')

        .flatten()

        .dropout(keep_prob)
        
        .linear_layer(10), #, scope='logits'),
        [
            tb.softmax(name='h').on('h')
        ,
            (
                tb.With( tf.name_scope('loss'),
                    tb.softmax_cross_entropy_with_logits(y).reduce_mean().make_scalar_summary('loss').on('loss')
                ),
                tb.minimize(tf.train.AdadeltaOptimizer(learning_rate)).on('trainer')
            )
        ],
        ['h', 'trainer', 'conv1', 'conv2', 'loss']
    )
    
    # Fraction of samples where the argmax of the softmax matches the label.
    with tf.name_scope('accuracy'):
        correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(h, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        tf.scalar_summary('accuracy', accuracy)
    
    # TF 0.x summary API (pre tf.summary.*).
    summaries = tf.merge_all_summaries()
    saver = tf.train.Saver()
    
    
    # NOTE(review): absolute path '/logs/' — prefer a path relative to the
    # notebook. 'writter' is a typo for 'writer' but the name is used by
    # the training cell below, so it is kept as-is here.
    logs_dir = "/logs/" + datetime.now().strftime('%m_%d_%Y_%H_%M_%S') 
    writter = tf.train.SummaryWriter(logs_dir, graph=graph)

    
def feed_dict(data, rate, prob):
    """Map a tensorbuilder batch plus hyperparameters onto the graph's
    input placeholders (x, y, learning_rate, keep_prob)."""
    placeholders = (x, y, learning_rate, keep_prob)
    values = (data.x, data.y, rate, prob)
    return dict(zip(placeholders, values))

In [16]:
# Train the network, logging summaries every 5 steps and evaluating /
# checkpointing (on best test accuracy) every 200 steps.
model_dir = 'mnist-conv-only.model'
    
with tf.Session(graph=graph, config=tf.ConfigProto(log_device_placement=True)) as sess:
    sess.run(tf.initialize_all_variables()); 
    best = 0.0   # best test accuracy seen so far (checkpoint gate)
    rate = 0.01  # Adadelta learning rate fed each step
    #saver.restore(sess, model_dir)
    
    # tensorbuilder iterator: 20000 epochs of 500-sample batches,
    # yielding (step, batch) pairs.
    for step, datum in training_data.epochs(20000).batches_of(500).enumerated():
        
        
        # One optimization step with dropout keep_prob=0.4.
        [_, _summaries] = sess.run(
            [trainer, summaries], 
            feed_dict=feed_dict(datum, rate, 0.4)
        )
        
        if step % 5 == 0:
            writter.add_summary(_summaries, step)
        
        
        # Periodic evaluation: dropout disabled (keep_prob=1.0). Note the
        # train metrics are computed over the whole training split, which
        # is feasible only because the dataset is small (4000 samples).
        if step % 200 == 0:
            [test_loss, test_accuracy] = sess.run(
                [loss, accuracy],
                feed_dict=feed_dict(test_data, rate, 1.0)
            )
            [train_loss, train_accuracy] = sess.run(
                [loss, accuracy],
                feed_dict=feed_dict(training_data, rate, 1.0)
            )
            
            print "Step ", step
            print "[Test]  loss: {0}, accuracy: {1}".format(test_loss, test_accuracy)
            print "[Train] loss: {0}, accuracy: {1}".format(train_loss, train_accuracy)
            
            # Checkpoint only when test accuracy improves.
            if test_accuracy > best:
                print "Saving"
                saver.save(sess, model_dir)
                best = test_accuracy
                
            print ""


Step  0
[Test]  loss: 2.29211807251, accuracy: 0.112000003457
[Train] loss: 2.29423260689, accuracy: 0.115500003099
Saving

Step  200
[Test]  loss: 1.45460486412, accuracy: 0.715000033379
[Train] loss: 1.46319389343, accuracy: 0.736500024796
Saving

Step  400
[Test]  loss: 0.547269642353, accuracy: 0.847999989986
[Train] loss: 0.56057202816, accuracy: 0.854500055313
Saving

Step  600
[Test]  loss: 0.371990293264, accuracy: 0.89200001955
[Train] loss: 0.383086711168, accuracy: 0.894000053406
Saving

Step  800
[Test]  loss: 0.294044315815, accuracy: 0.913000047207
[Train] loss: 0.303965449333, accuracy: 0.914250135422
Saving

Step  1000
[Test]  loss: 0.243136256933, accuracy: 0.925999999046
[Train] loss: 0.250524163246, accuracy: 0.930750072002
Saving

Step  1200
[Test]  loss: 0.210943579674, accuracy: 0.926000058651
[Train] loss: 0.213677838445, accuracy: 0.938250124454
Saving

Step  1400
[Test]  loss: 0.186957523227, accuracy: 0.939999997616
[Train] loss: 0.185363918543, accuracy: 0.946500122547
Saving

Step  1600
[Test]  loss: 0.169337242842, accuracy: 0.942000031471
[Train] loss: 0.16368368268, accuracy: 0.954500079155
Saving

Step  1800
[Test]  loss: 0.155561760068, accuracy: 0.948000073433
[Train] loss: 0.146577641368, accuracy: 0.959250032902
Saving

Step  2000
[Test]  loss: 0.145013511181, accuracy: 0.952000081539
[Train] loss: 0.133421182632, accuracy: 0.960000097752
Saving

Step  2200
[Test]  loss: 0.135558962822, accuracy: 0.958000004292
[Train] loss: 0.118019118905, accuracy: 0.969000101089
Saving

Step  2400
[Test]  loss: 0.126990482211, accuracy: 0.958000004292
[Train] loss: 0.108164943755, accuracy: 0.970750153065

Step  2600
[Test]  loss: 0.123922266066, accuracy: 0.962000131607
[Train] loss: 0.098623752594, accuracy: 0.973750114441
Saving

Step  2800
[Test]  loss: 0.118840381503, accuracy: 0.963000059128
[Train] loss: 0.0902587473392, accuracy: 0.974500060081
Saving

Step  3000
[Test]  loss: 0.117882527411, accuracy: 0.962000131607
[Train] loss: 0.0848418027163, accuracy: 0.975750029087

Step  3200
[Test]  loss: 0.115989744663, accuracy: 0.962999999523
[Train] loss: 0.0776446610689, accuracy: 0.979250133038

Step  3400
[Test]  loss: 0.108925513923, accuracy: 0.966000080109
[Train] loss: 0.0726128816605, accuracy: 0.979500055313
Saving

Step  3600
[Test]  loss: 0.107187993824, accuracy: 0.967000067234
[Train] loss: 0.0665224120021, accuracy: 0.983000099659
Saving

Step  3800
[Test]  loss: 0.108847871423, accuracy: 0.967000067234
[Train] loss: 0.0625766664743, accuracy: 0.984000086784

Step  4000
[Test]  loss: 0.106398336589, accuracy: 0.963000059128
[Train] loss: 0.0591484792531, accuracy: 0.984500169754

Step  4200
[Test]  loss: 0.101018331945, accuracy: 0.965000092983
[Train] loss: 0.0531566627324, accuracy: 0.986500144005

Step  4400
[Test]  loss: 0.100670270622, accuracy: 0.966000080109
[Train] loss: 0.0506397821009, accuracy: 0.986500024796

Step  4600
[Test]  loss: 0.0979802981019, accuracy: 0.97000002861
[Train] loss: 0.0461413860321, accuracy: 0.989500105381
Saving

Step  4800
[Test]  loss: 0.0974231734872, accuracy: 0.969000041485
[Train] loss: 0.0435087941587, accuracy: 0.989750027657

Step  5000
[Test]  loss: 0.097541578114, accuracy: 0.968000054359
[Train] loss: 0.0400861203671, accuracy: 0.991000115871

Step  5200
[Test]  loss: 0.0956977307796, accuracy: 0.974000036716
[Train] loss: 0.0391171500087, accuracy: 0.990749955177
Saving

Step  5400
[Test]  loss: 0.096875667572, accuracy: 0.969000101089
[Train] loss: 0.0371902771294, accuracy: 0.991500139236

Step  5600
[Test]  loss: 0.0945744514465, accuracy: 0.967000067234
[Train] loss: 0.0335449576378, accuracy: 0.992750108242

Step  5800
[Test]  loss: 0.0961223766208, accuracy: 0.969000041485
[Train] loss: 0.0303419735283, accuracy: 0.993000030518

Step  6000
[Test]  loss: 0.0932109430432, accuracy: 0.967000007629
[Train] loss: 0.0286728795618, accuracy: 0.993000030518

Step  6200
[Test]  loss: 0.089908093214, accuracy: 0.97000002861
[Train] loss: 0.0267916843295, accuracy: 0.994000136852

Step  6400
[Test]  loss: 0.0934402346611, accuracy: 0.969000041485
[Train] loss: 0.0248438380659, accuracy: 0.994250118732

Step  6600
[Test]  loss: 0.0902707725763, accuracy: 0.969000101089
[Train] loss: 0.0243868157268, accuracy: 0.994250059128

Step  6800
[Test]  loss: 0.0939688384533, accuracy: 0.969000101089
[Train] loss: 0.0222392976284, accuracy: 0.994500041008

Step  7000
[Test]  loss: 0.0899602472782, accuracy: 0.968000054359
[Train] loss: 0.0204516518861, accuracy: 0.995250165462

Step  7200
[Test]  loss: 0.0951465889812, accuracy: 0.968000113964
[Train] loss: 0.0203317720443, accuracy: 0.995500087738

Step  7400
[Test]  loss: 0.0882396623492, accuracy: 0.97100007534
[Train] loss: 0.0176293700933, accuracy: 0.996250092983

Step  7600
[Test]  loss: 0.0887360274792, accuracy: 0.97100007534
[Train] loss: 0.0169745776802, accuracy: 0.996250033379

Step  7800
[Test]  loss: 0.0866044461727, accuracy: 0.972000002861
[Train] loss: 0.0153022808954, accuracy: 0.996000051498

Step  8000
[Test]  loss: 0.0919567942619, accuracy: 0.970000088215
[Train] loss: 0.0145661514252, accuracy: 0.996750116348

Step  8200
[Test]  loss: 0.0898307189345, accuracy: 0.972000002861
[Train] loss: 0.0132084432989, accuracy: 0.997250139713

Step  8400
[Test]  loss: 0.0890099704266, accuracy: 0.972000002861
[Train] loss: 0.0127233201638, accuracy: 0.997750103474

Step  8600
[Test]  loss: 0.0872589200735, accuracy: 0.972000062466
[Train] loss: 0.0120326522738, accuracy: 0.997750043869

Step  8800
[Test]  loss: 0.0864012986422, accuracy: 0.972000002861
[Train] loss: 0.0113519821316, accuracy: 0.997750163078

Step  9000
[Test]  loss: 0.0876462310553, accuracy: 0.972000002861
[Train] loss: 0.0104630375281, accuracy: 0.998000085354

Step  9200
[Test]  loss: 0.0895759761333, accuracy: 0.97100007534
[Train] loss: 0.00996027514338, accuracy: 0.998500108719

Step  9400
[Test]  loss: 0.0884279608727, accuracy: 0.97100007534
[Train] loss: 0.00920971296728, accuracy: 0.998500108719

Step  9600
[Test]  loss: 0.0881142318249, accuracy: 0.97100007534
[Train] loss: 0.00917288661003, accuracy: 0.998250126839

Step  9800
[Test]  loss: 0.0890470594168, accuracy: 0.973000049591
[Train] loss: 0.0082649923861, accuracy: 0.999250054359

Step  10000
[Test]  loss: 0.0890729725361, accuracy: 0.972000062466
[Train] loss: 0.00744660710916, accuracy: 0.999250054359

Step  10200
[Test]  loss: 0.091661080718, accuracy: 0.97100007534
[Train] loss: 0.00728806015104, accuracy: 0.998750090599

Step  10400
[Test]  loss: 0.0878202617168, accuracy: 0.97000002861
[Train] loss: 0.00706719513983, accuracy: 0.999250054359

Step  10600
[Test]  loss: 0.0914891958237, accuracy: 0.97100007534
[Train] loss: 0.00637581059709, accuracy: 0.999250054359

Step  10800
[Test]  loss: 0.0889180898666, accuracy: 0.97100007534
[Train] loss: 0.00599149381742, accuracy: 0.999500095844

Step  11000
[Test]  loss: 0.0908008813858, accuracy: 0.969000101089
[Train] loss: 0.00590619817376, accuracy: 0.999250054359

Step  11200
[Test]  loss: 0.0887667015195, accuracy: 0.972000062466
[Train] loss: 0.00546912848949, accuracy: 0.99950003624

Step  11400
[Test]  loss: 0.088585741818, accuracy: 0.97100007534
[Train] loss: 0.00513876415789, accuracy: 0.999250054359

Step  11600
[Test]  loss: 0.088339202106, accuracy: 0.97000002861
[Train] loss: 0.00473644211888, accuracy: 0.999500155449

Step  11800
[Test]  loss: 0.0893681645393, accuracy: 0.973000049591
[Train] loss: 0.00463147414848, accuracy: 0.999750077724

Step  12000
[Test]  loss: 0.0894398614764, accuracy: 0.97000002861
[Train] loss: 0.00450105825439, accuracy: 0.999750077724

Step  12200
[Test]  loss: 0.0908785462379, accuracy: 0.971000015736
[Train] loss: 0.0039752968587, accuracy: 0.999750077724

Step  12400
[Test]  loss: 0.0870287641883, accuracy: 0.972000002861
[Train] loss: 0.00390860252082, accuracy: 0.999750077724

Step  12600
[Test]  loss: 0.0912239551544, accuracy: 0.97100007534
[Train] loss: 0.00358602451161, accuracy: 0.999750077724

Step  12800
[Test]  loss: 0.0928879156709, accuracy: 0.97100007534
[Train] loss: 0.00326515035704, accuracy: 1.00000011921

Step  13000
[Test]  loss: 0.0907796919346, accuracy: 0.972999989986
[Train] loss: 0.00319997081533, accuracy: 0.999750077724

Step  13200
[Test]  loss: 0.0914117842913, accuracy: 0.97000002861
[Train] loss: 0.00310352887027, accuracy: 0.999750077724

Step  13400
[Test]  loss: 0.0901842564344, accuracy: 0.97000002861
[Train] loss: 0.00296216807328, accuracy: 1.00000011921

Step  13600
[Test]  loss: 0.0954992994666, accuracy: 0.97100007534
[Train] loss: 0.00275525730103, accuracy: 1.0

Step  13800
[Test]  loss: 0.0910477489233, accuracy: 0.97000002861
[Train] loss: 0.0025238650851, accuracy: 1.00000011921

Step  14000
[Test]  loss: 0.0894991531968, accuracy: 0.971000015736
[Train] loss: 0.00265391985886, accuracy: 1.00000011921

Step  14200
[Test]  loss: 0.0911946296692, accuracy: 0.97100007534
[Train] loss: 0.00248092738912, accuracy: 1.00000011921

Step  14400
[Test]  loss: 0.0915468484163, accuracy: 0.97100007534
[Train] loss: 0.00227109901607, accuracy: 1.00000011921

Step  14600
[Test]  loss: 0.0916983634233, accuracy: 0.97000002861
[Train] loss: 0.00212597823702, accuracy: 1.00000011921

Step  14800
[Test]  loss: 0.0903840214014, accuracy: 0.97100007534
[Train] loss: 0.00202477234416, accuracy: 1.00000011921

Step  15000
[Test]  loss: 0.0905034095049, accuracy: 0.971000015736
[Train] loss: 0.00199853954837, accuracy: 1.00000011921

Step  15200
[Test]  loss: 0.0920734554529, accuracy: 0.971000015736
[Train] loss: 0.00187373545486, accuracy: 1.00000011921

Step  15400
[Test]  loss: 0.0924206748605, accuracy: 0.97000002861
[Train] loss: 0.00184154673479, accuracy: 1.00000011921

Step  15600
[Test]  loss: 0.0914061740041, accuracy: 0.97000002861
[Train] loss: 0.00180465588346, accuracy: 1.00000011921

Step  15800
[Test]  loss: 0.0948817655444, accuracy: 0.97100007534
[Train] loss: 0.00169139658101, accuracy: 1.00000011921

Step  16000
[Test]  loss: 0.092580601573, accuracy: 0.97100007534
[Train] loss: 0.00156721589155, accuracy: 1.00000011921

Step  16200
[Test]  loss: 0.0918161123991, accuracy: 0.97000002861
[Train] loss: 0.00151309650391, accuracy: 1.00000011921

Step  16400
[Test]  loss: 0.09270285815, accuracy: 0.971000015736
[Train] loss: 0.00156214390881, accuracy: 1.00000011921

Step  16600
[Test]  loss: 0.0955192446709, accuracy: 0.97100007534
[Train] loss: 0.0014662174508, accuracy: 1.00000011921

Step  16800
[Test]  loss: 0.0963758155704, accuracy: 0.97000002861
[Train] loss: 0.00138795375824, accuracy: 1.00000011921

Step  17000
[Test]  loss: 0.0978658348322, accuracy: 0.97100007534
[Train] loss: 0.00142315833364, accuracy: 1.00000011921

Step  17200
[Test]  loss: 0.0947190672159, accuracy: 0.97000002861
[Train] loss: 0.00140084826853, accuracy: 1.00000011921

Step  17400
[Test]  loss: 0.0951734259725, accuracy: 0.97000002861
[Train] loss: 0.00119268707931, accuracy: 1.00000011921

Step  17600
[Test]  loss: 0.0956219211221, accuracy: 0.971000015736
[Train] loss: 0.00114819430746, accuracy: 1.00000011921

Step  17800
[Test]  loss: 0.0952445119619, accuracy: 0.972000062466
[Train] loss: 0.00120735482778, accuracy: 1.00000011921

Step  18000
[Test]  loss: 0.0944766625762, accuracy: 0.972000002861
[Train] loss: 0.00110859668348, accuracy: 1.00000011921

Step  18200
[Test]  loss: 0.0943832471967, accuracy: 0.971000015736
[Train] loss: 0.00106250180397, accuracy: 1.00000011921

Step  18400
[Test]  loss: 0.0932471752167, accuracy: 0.97000002861
[Train] loss: 0.00102047575638, accuracy: 1.00000011921

Step  18600
[Test]  loss: 0.0949320048094, accuracy: 0.97000002861
[Train] loss: 0.000988183077425, accuracy: 1.00000011921

Step  18800
[Test]  loss: 0.0942324101925, accuracy: 0.97000002861
[Train] loss: 0.000969504064415, accuracy: 1.00000011921

Step  19000
[Test]  loss: 0.0978661254048, accuracy: 0.97100007534
[Train] loss: 0.000932077819016, accuracy: 1.00000011921

Step  19200
[Test]  loss: 0.0956512093544, accuracy: 0.97000002861
[Train] loss: 0.00101366755553, accuracy: 1.00000011921

Step  19400
[Test]  loss: 0.096945039928, accuracy: 0.971000015736
[Train] loss: 0.00085820176173, accuracy: 1.00000011921

Step  19600
[Test]  loss: 0.0993446484208, accuracy: 0.972000002861
[Train] loss: 0.000853685021866, accuracy: 1.00000011921

Step  19800
[Test]  loss: 0.0966393426061, accuracy: 0.97100007534
[Train] loss: 0.000842115725391, accuracy: 1.00000011921


In [9]:
# Restore the trained model, pull out the first conv layer's kernels,
# and compute conv1/conv2 activations for one sample image.
image_index = 4

with tf.Session(graph=graph) as sess:
    saver.restore(sess, model_dir)
    
    # NOTE(review): TF conv2d weights for a (3x3, 1-in, 16-out) layer are
    # typically stored with shape (3, 3, 1, 16); a flat reshape to
    # [16, 3, 3] interleaves values across kernels instead of selecting
    # per-kernel slices — np.transpose(w, [3, 2, 0, 1]) may be what was
    # intended. TODO confirm against tensorbuilder's variable layout.
    with tf.variable_scope("Conv", reuse=True):
        conv_weights = tf.get_variable("weights").eval().reshape([16, 3, 3])
        
    # Activations for a single-image batch; dropout sits after these
    # layers, so keep_prob is not needed in the feed.
    [_conv1, _conv2] = sess.run([conv1, conv2], feed_dict={x: sample_features[image_index:image_index+1, :, :, :]})
        

# Visualize the first-layer kernels and both conv layers' activation
# maps for the selected sample. The three subplot grids previously
# repeated identical boilerplate; it is factored into one helper.

def plot_heatmap_grid(rows, cols, cell_z, reversescale=True, size=None):
    """Render a rows x cols grid of unsmoothed heatmaps inline.

    cell_z(i) must yield the 2-D array for grid cell i (row-major order).
    `size`, when given, sets both figure width and height in pixels.
    Returns the figure, mirroring the previous module-level `fig`.
    """
    grid = tools.make_subplots(rows=rows, cols=cols, print_grid=False)
    for i in range(rows * cols):
        trace = heatmap(cell_z(i), smooth=None, reversescale=reversescale)
        grid.append_trace(trace, i // cols + 1, i % cols + 1)
    for i in range(rows * cols):
        grid['layout']['xaxis{0}'.format(i + 1)].update(**axis_ops)
        grid['layout']['yaxis{0}'.format(i + 1)].update(**axis_ops)
    if size is not None:
        grid['layout'].update(width=size, height=size)
    py.iplot(grid)
    return grid

# weight plots: the 16 first-layer 3x3 kernels (raw, unreversed scale)
fig = plot_heatmap_grid(4, 4, lambda i: conv_weights[i], reversescale=False)

# the input image, for reference
hmap = heatmap(sample_features[image_index, :, :, 0])
py.iplot(go.Figure(data=[hmap], layout=hlayout))

# conv1 plots: the 16 activation maps of the first conv layer
fig = plot_heatmap_grid(4, 4, lambda i: _conv1[0, :, :, i], size=1000)

# conv2 plots: the 32 activation maps of the second conv layer
fig = plot_heatmap_grid(8, 4, lambda i: _conv2[0, :, :, i], size=1000)



In [17]:
# Run the trained model on the 10 per-class samples plus 3 hand-drawn
# digits loaded from num_{4,6,8}.mat, then plot each image next to the
# softmax distribution over classes.
extra_answers = [4, 6 , 8]
# The same vertical flip applied to the training data is applied here,
# so the extra images match the model's expected orientation.
extra_samples = np.array([loadmat('num_{0}.mat'.format(n))['im'][::-1, :] for n in extra_answers])
extra_samples = extra_samples.reshape([3, 20, 20, 1])

# Python 2 list concatenation; assumes sample_features holds digits
# 0..9 in order (see the sample_indexes cell) — TODO confirm.
answers = range(10) + extra_answers
samples = np.vstack((sample_features, extra_samples))

with tf.Session(graph=graph) as sess:
    saver.restore(sess, model_dir)
    # keep_prob=1.0 disables dropout for inference.
    predictions = sess.run(h, feed_dict={x: samples, keep_prob: 1.0})
    prediction_vals = np.argmax(predictions, 1)
    
    
# One figure per sample: heatmap of the image (left) and the softmax
# probabilities as a bar chart (right), titled with answer vs prediction.
for answer, sample, prediction, prediction_val in zip(answers, samples, predictions, prediction_vals):
    fig = tools.make_subplots(
        rows=1, cols=2, print_grid=False,
        subplot_titles=(
            'Answer {0}'.format(answer),
            'Prediction {0} - {1}'.format(prediction_val, prediction_val == answer)
        )
    )

    heatmap_trace = heatmap(sample[:, :, 0])
    bar_trace = go.Bar(
        x=range(10),
        y=prediction
    )

    fig.append_trace(heatmap_trace, 1, 1)
    fig.append_trace(bar_trace, 1, 2)

    # axis_ops also hides the bar chart's tick labels — intentional or
    # not, this reproduces the original rendering.
    for i in range(2):
        fig['layout']['xaxis{0}'.format(i+1)].update(**axis_ops)
        fig['layout']['yaxis{0}'.format(i+1)].update(**axis_ops)


    py.iplot(fig)



In [ ]: