In [1]:
# TensorBoard Helper Functions and Constants

# Directory to export TensorBoard summary statistics, graph data, etc.
TB_DIR = '/tmp/tensorboard/tf_feedforward'

def clean_tb_dir():
    !rm -rf /tmp/tensorboard/tf_feedforward

def _start_tb(d):
    """
    Private function that calls `tensorboard` shell command
    
    args:
      d: The desired directory to launch in TensorBoard
    """
    !tensorboard --port=6006 --logdir=$d

def start_tensorboard(d=TB_DIR):
    """
    Starts TensorBoard from the notebook in a separate thread.
    Prevents Jupyter Notebook from halting while TensorBoard runs.
    """
    import threading
    threading.Thread(target=_start_tb, args=(TB_DIR,)).start()
    del threading

def stop_tensorboard():
    """
    Kills all TensorBoard processes
    """
    !ps -aef | grep "tensorboard" | tr -s ' ' | cut -d ' ' -f2 | xargs kill -KILL
    
def reset_tensorboard():
    stop_tensorboard()
    start_tensorboard()

In [2]:
# Import core TensorFlow modules
import tensorflow as tf
import numpy as np

In [3]:
# Modules required for file download and extraction
import os
import sys
import tarfile
from six.moves.urllib.request import urlretrieve
from scipy import ndimage

In [4]:
# Directory to download dataset
DATASET_DIR = '/tmp/pipeline/datasets/notmnist/'

# Create the directory
!mkdir -p {DATASET_DIR}

In [5]:
def maybe_download(filename, url, force=False):
  """Download a file if not present."""
  if force or not os.path.exists(DATASET_DIR + filename):
    filename, _ = urlretrieve(url + filename, DATASET_DIR + filename)
    print('\nDownload complete for {}'.format(filename))
    return filename
  else:
    print('File {} already present.'.format(filename))
  return DATASET_DIR + filename

def maybe_extract(filename, force=False):
  root = os.path.splitext(os.path.splitext(filename)[0])[0]  # remove .tar.gz
  if os.path.isdir(root) and not force:
    # You may override by setting force=True.
    print('{} already present - don\'t need to extract {}.'.format(root, filename))
  else:
    print('Extracting data for {}. This may take a while. Please wait.'.format(root))
    tar = tarfile.open(filename)
    sys.stdout.flush()
    tar.extractall(root[0:root.rfind('/') + 1])
    tar.close()
  data_folders = [
    os.path.join(root, d) for d in sorted(os.listdir(root))
    if os.path.isdir(os.path.join(root, d))]
  print(data_folders)
  return data_folders

In [6]:
# Locations to download data:
url = 'http://yaroslavvb.com/upload/notMNIST/'

In [7]:
# Download two datasets
train_zip_path = maybe_download('notMNIST_small.tar.gz', url)


Download complete for /tmp/pipeline/datasets/notmnist/notMNIST_small.tar.gz

In [8]:
# Extract datasets
train_folders = maybe_extract(train_zip_path)


Extracting data for /tmp/pipeline/datasets/notmnist/notMNIST_small. This may take a while. Please wait.
['/tmp/pipeline/datasets/notmnist/notMNIST_small/A', '/tmp/pipeline/datasets/notmnist/notMNIST_small/B', '/tmp/pipeline/datasets/notmnist/notMNIST_small/C', '/tmp/pipeline/datasets/notmnist/notMNIST_small/D', '/tmp/pipeline/datasets/notmnist/notMNIST_small/E', '/tmp/pipeline/datasets/notmnist/notMNIST_small/F', '/tmp/pipeline/datasets/notmnist/notMNIST_small/G', '/tmp/pipeline/datasets/notmnist/notMNIST_small/H', '/tmp/pipeline/datasets/notmnist/notMNIST_small/I', '/tmp/pipeline/datasets/notmnist/notMNIST_small/J']

In [9]:
len(train_folders)


Out[9]:
10

In [10]:
image_height = 28  # Pixel height of images
image_width = 28  # Pixel width of images
pixel_depth = 255.0  # Number of levels per pixel
expected_img_shape = (image_height, image_width)  # Black and white image, no 3rd dimension
num_labels = len(train_folders)

def load_image_folder(folder):
  """Load the data for a single image label."""
  
  # Create a list of image paths inside the folder  
  image_files = os.listdir(folder)
  # Create empty numpy array to hold data
  dataset = np.ndarray(shape=(len(image_files), image_height, image_width),
                         dtype=np.float32)
  num_images = 0  # Counter for number of successful images loaded
  for image in image_files:
    image_file = os.path.join(folder, image)
    try:
      # Read in image pixel data as floating point values
      image_data = ndimage.imread(image_file).astype(float)
      # Scale values: [0.0, 255.0] => [-1.0, 1.0] 
      image_data = (image_data - pixel_depth / 2) / (pixel_depth / 2)
      if image_data.shape != expected_img_shape:
        print('File {} has unexpected dimensions: '.format(str(image_data.shape)))
        continue
      # Add image to the numpy array dataset
      dataset[num_images, :, :] = image_data
      num_images = num_images + 1
    except IOError as e:
      print('Could not read:', image_file, ':', e, '- skipping this file and moving on.')
  
  # Trim dataset to remove unused space
  dataset = dataset[0:num_images, :, :]
  return dataset

In [11]:
def make_data_label_arrays(num_rows, image_height, image_width):
  """
  Creates and returns empty numpy arrays for input data and labels
  """
  if num_rows:
    dataset = np.ndarray((num_rows, image_height, image_width), dtype=np.float32)
    labels = np.ndarray(num_rows, dtype=np.int32)
  else:
    dataset, labels = None, None
  return dataset, labels

def collect_datasets(data_folders):
  datasets = []
  total_images = 0
  for label, data_folder in enumerate(data_folders):
    # Bring all test folder images in as numpy arrays
    dataset = load_image_folder(data_folder)
    num_images = len(dataset)
    total_images += num_images
    datasets.append((dataset, label, num_images))
  return datasets, total_images

def merge_train_test_datasets(datasets, total_images, percent_test):
    num_train = total_images * (1.0 - percent_test)
    num_test = total_images * percent_test
    train_dataset, train_labels = make_data_label_arrays(num_train, image_height, image_width)
    test_dataset, test_labels = make_data_label_arrays(num_test, image_height, image_width)
    
    train_counter = 0
    test_counter = 0
    dataset_counter = 1
    for dataset, label, num_images in datasets:
      np.random.shuffle(dataset)
      if dataset_counter != len(datasets):
        n_v = num_images // (1.0 / percent_test)
        n_t = num_images - n_v
      else:
        # Last label, make sure dataset sizes match up to what we created
        n_v = len(test_dataset) - test_counter
        n_t = len(train_dataset) - train_counter
      train_dataset[train_counter: train_counter + n_t] = dataset[:n_t]
      train_labels[train_counter: train_counter + n_t] = label
      test_dataset[test_counter: test_counter + n_v] = dataset[n_t: n_t + n_v]
      test_labels[test_counter: test_counter + n_v] = label
      train_counter += n_t
      test_counter += n_v
      dataset_counter += 1
    return train_dataset, train_labels, test_dataset, test_labels

In [12]:
train_test_datasets, train_test_total_images = collect_datasets(train_folders)


('Could not read:', '/tmp/pipeline/datasets/notmnist/notMNIST_small/A/RGVtb2NyYXRpY2FCb2xkT2xkc3R5bGUgQm9sZC50dGY=.png', ':', IOError('cannot identify image file',), '- skipping this file and moving on.')
('Could not read:', '/tmp/pipeline/datasets/notmnist/notMNIST_small/F/Q3Jvc3NvdmVyIEJvbGRPYmxpcXVlLnR0Zg==.png', ':', IOError('cannot identify image file',), '- skipping this file and moving on.')

In [13]:
train_dataset, train_labels, test_dataset, test_labels = \
  merge_train_test_datasets(train_test_datasets, train_test_total_images, 0.1)


/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:6: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:7: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:41: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:42: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:43: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:44: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future

In [14]:
len(train_dataset)


Out[14]:
16851

In [15]:
# Convert data examples flattened vectors
# Convert labels to one-hot encoding
num_channels = 1  # grayscale
def reformat(dataset, labels):
  dataset = dataset.reshape( (-1, image_height * image_width)).astype(np.float32)
  labels = (np.arange(num_labels) == labels[:,None]).astype(np.float32)
  return dataset, labels

In [16]:
train_dataset, train_labels = reformat(train_dataset, train_labels)
test_dataset, test_labels = reformat(test_dataset, test_labels)

In [17]:
print('Training set', train_dataset.shape, train_labels.shape)
print('Test set', test_dataset.shape, test_labels.shape)


('Training set', (16851, 784), (16851, 10))
('Test set', (1872, 784), (1872, 10))

In [18]:
def shuffle_data_with_labels(dataset, labels):
    indices = range(len(dataset))
    np.random.shuffle(indices)
    new_data = np.ndarray(dataset.shape, dataset.dtype)
    new_labels = np.ndarray(labels.shape, dataset.dtype)
    n = 0
    for i in indices:
        new_data[n] = dataset[i]
        new_labels[n] = labels[i]
        n += 1
    return new_data, new_labels

In [19]:
train_dataset, train_labels = shuffle_data_with_labels(train_dataset, train_labels)

In [20]:
batch_size = 64

num_hidden_1 = 300
num_hidden_2 = 150
num_hidden_3 = 50

# Input data.
input_data = tf.placeholder(tf.float32, shape=(None, image_height * image_width), name="input_data")
input_labels = tf.placeholder(tf.float32, shape=(None, num_labels), name="input_labels")
  
with tf.name_scope('hidden1'):
  weights = tf.Variable(tf.truncated_normal([image_height * image_width, num_hidden_1]), name='weights')
  biases = tf.Variable(tf.zeros([num_hidden_1]), name='biases')
  hidden1 = tf.nn.sigmoid(tf.matmul(input_data, weights) + biases)

with tf.name_scope('hidden2'):
  weights = tf.Variable(tf.truncated_normal([num_hidden_1, num_hidden_2]), name='weights')
  biases = tf.Variable(tf.zeros([num_hidden_2]), name='biases')
  hidden2 = tf.nn.sigmoid(tf.matmul(hidden1, weights) + biases)

with tf.name_scope('hidden3'):
  weights = tf.Variable(tf.truncated_normal([num_hidden_2, num_hidden_3]), name='weights')
  biases = tf.Variable(tf.zeros([num_hidden_3]), name='biases')
  hidden3 = tf.nn.sigmoid(tf.matmul(hidden2, weights) + biases)

with tf.name_scope('output_layer'):
  weights = tf.Variable(tf.truncated_normal([num_hidden_3, num_labels]), name='weights')
  biases = tf.Variable(tf.zeros([num_labels]), name='biases')
  logits = tf.matmul(hidden3, weights) + biases

loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, input_labels))
    
# Optimizer.
optimizer = tf.train.GradientDescentOptimizer(0.05).minimize(loss)
  
# Predictions for the training and test data.
model_prediction = tf.nn.softmax(logits, name="prediction")
label_prediction = tf.argmax(model_prediction, 1, name="predicted_label")
   
with tf.name_scope('summaries'):
  tf.scalar_summary('loss', loss)
  with tf.name_scope('accuracy'):
    correct_prediction = tf.equal(label_prediction, tf.argmax(input_labels, 1))
    model_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    tf.scalar_summary('accuracy', model_accuracy)
    
merged_summaries = tf.merge_all_summaries()

In [21]:
clean_tb_dir()

In [22]:
session = tf.Session()
writer = tf.train.SummaryWriter(TB_DIR, graph=session.graph)

In [24]:
num_steps = 1001

session.run(tf.initialize_all_variables())
print('Initialized')
for step in range(num_steps):
  offset = (step * batch_size) % (train_labels.shape[0] - batch_size)
  batch_data = train_dataset[offset:(offset + batch_size), :]
  batch_labels = train_labels[offset:(offset + batch_size), :]
  feed_dict = {input_data : batch_data, input_labels : batch_labels}
  _, l, predictions, accuracy, summaries = session.run(
    [optimizer, loss, model_prediction, model_accuracy, merged_summaries], feed_dict=feed_dict)
  if (step % 50 == 0):
    writer.add_summary(summaries, step)
    print('Minibatch loss at step %d: %f' % (step, l))
    print('Minibatch accuracy: {}'.format(accuracy))
test_dict = {input_data : test_dataset, input_labels : test_labels}
test_accuracy = session.run(model_accuracy, feed_dict=test_dict)
print('Test accuracy: {}'.format(test_accuracy))
writer.flush()


Initialized
Minibatch loss at step 0: 5.242322
Minibatch accuracy: 0.046875
Minibatch loss at step 50: 2.605259
Minibatch accuracy: 0.171875
Minibatch loss at step 100: 2.109148
Minibatch accuracy: 0.25
Minibatch loss at step 150: 1.940583
Minibatch accuracy: 0.3125
Minibatch loss at step 200: 1.954812
Minibatch accuracy: 0.3125
Minibatch loss at step 250: 1.641718
Minibatch accuracy: 0.390625
Minibatch loss at step 300: 1.802675
Minibatch accuracy: 0.390625
Minibatch loss at step 350: 1.655632
Minibatch accuracy: 0.390625
Minibatch loss at step 400: 1.346952
Minibatch accuracy: 0.578125
Minibatch loss at step 450: 1.279902
Minibatch accuracy: 0.546875
Minibatch loss at step 500: 1.386974
Minibatch accuracy: 0.5
Minibatch loss at step 550: 1.156456
Minibatch accuracy: 0.6875
Minibatch loss at step 600: 1.210849
Minibatch accuracy: 0.609375
Minibatch loss at step 650: 1.254033
Minibatch accuracy: 0.640625
Minibatch loss at step 700: 1.268145
Minibatch accuracy: 0.5625
Minibatch loss at step 750: 1.154119
Minibatch accuracy: 0.609375
Minibatch loss at step 800: 1.166272
Minibatch accuracy: 0.609375
Minibatch loss at step 850: 0.914112
Minibatch accuracy: 0.703125
Minibatch loss at step 900: 1.176358
Minibatch accuracy: 0.625
Minibatch loss at step 950: 1.122845
Minibatch accuracy: 0.5625
Minibatch loss at step 1000: 0.955626
Minibatch accuracy: 0.703125
Test accuracy: 0.671474337578

In [25]:
writer.close()

In [26]:
start_tensorboard()


WARNING:tensorflow:IOError [Errno 2] No such file or directory: '/usr/local/lib/python2.7/dist-packages/tensorflow/tensorboard/TAG' on path /usr/local/lib/python2.7/dist-packages/tensorflow/tensorboard/TAG
WARNING:tensorflow:Unable to read TensorBoard tag
Starting TensorBoard  on port 6006
(You can navigate to http://0.0.0.0:6006)
166.170.38.222 - - [07/Aug/2016 00:34:10] "GET / HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/plottable/plottable.css HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /lib/css/global.css HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/d3/d3.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/lodash/lodash.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/plottable/plottable.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/graphlib/dist/graphlib.core.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/dagre/dist/dagre.core.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/webcomponentsjs/webcomponents-lite.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/polymer/polymer.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/iron-ajax/iron-ajax.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/iron-list/iron-list.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/iron-collapse/iron-collapse.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-button/paper-button.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-checkbox/paper-checkbox.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-dialog/paper-dialog.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-dropdown-menu/paper-dropdown-menu.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-input/paper-input.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-icon-button/paper-icon-button.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-header-panel/paper-header-panel.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:11] "GET /external/paper-item/paper-item.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-menu/paper-menu.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-progress/paper-progress.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-radio-button/paper-radio-button.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-radio-group/paper-radio-group.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-slider/paper-slider.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-styles/paper-styles.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-toggle-button/paper-toggle-button.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-toolbar/paper-toolbar.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-tabs/paper-tabs.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /dist/tf-tensorboard.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/polymer/polymer-mini.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/polymer/polymer-micro.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/iron-ajax/iron-request.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/neon-animation/neon-animation-runner-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-dialog-behavior/paper-dialog-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-dialog-behavior/paper-dialog-shared-styles.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/iron-selector/iron-selectable.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/iron-flex-layout/iron-flex-layout.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-styles/typography.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-styles/default-theme.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/iron-form-element-behavior/iron-form-element-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/iron-range-behavior/iron-range-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-menu-button/paper-menu-button.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-styles/color.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-behaviors/paper-inky-focus-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:12] "GET /external/paper-ripple/paper-ripple.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-behaviors/iron-button-state.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-behaviors/iron-control-state.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-icon/iron-icon.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-icons/iron-icons.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-validatable-behavior/iron-validatable-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-menu-behavior/iron-menu-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-behaviors/paper-button-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-material/paper-material.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-menu/paper-menu-shared-styles.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-input/iron-input.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-behaviors/paper-checked-element-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-input/paper-input-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-input/paper-input-char-counter.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-input/paper-input-container.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-input/paper-input-error.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-menu-behavior/iron-menubar-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-resizable-behavior/iron-resizable-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-tabs/paper-tabs-icons.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-item/paper-item-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-tabs/paper-tab.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-flex-layout/classes/iron-flex-layout.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-styles/shadow.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/promise-polyfill/promise-polyfill-lite.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-meta/iron-meta.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-overlay-behavior/iron-overlay-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/neon-animation/neon-animatable-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/iron-selector/iron-selection.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/font-roboto/roboto.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:13] "GET /external/paper-item/paper-item-shared-styles.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-dropdown/iron-dropdown.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/neon-animation/animations/fade-in-animation.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/neon-animation/animations/fade-out-animation.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/paper-menu-button/paper-menu-button-animations.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/paper-behaviors/paper-ripple-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-iconset-svg/iron-iconset-svg.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-selector/iron-multi-selectable.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-a11y-announcer/iron-a11y-announcer.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/paper-material/paper-material-shared-styles.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-checked-element-behavior/iron-checked-element-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/paper-input/paper-input-addon-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-flex-layout/classes/iron-shadow-flex-layout.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/promise-polyfill/Promise.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-dropdown/iron-dropdown-scroll-manager.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-fit-behavior/iron-fit-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/neon-animation/animations/opaque-animation.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-overlay-behavior/iron-overlay-manager.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/neon-animation/neon-animation-behavior.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/neon-animation/web-animations.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/iron-overlay-behavior/iron-overlay-backdrop.html HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:14] "GET /external/web-animations-js/web-animations-next-lite.min.js HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:15] "GET /data/runs HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:15] "GET /data/runs HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:17] "GET /data/scalars?run=.&tag=accuracy HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:18] "GET /data/scalars?run=.&tag=loss HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:28] "GET /data/runs HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:28] "GET /data/runs HTTP/1.1" 200 -
166.170.38.222 - - [07/Aug/2016 00:34:28] "GET /data/graph?run=.&limit_attr_size=1024&large_attrs_key=_too_large_attrs HTTP/1.1" 200 -

In [27]:
stop_tensorboard()

In [28]:
clean_tb_dir()

In [33]:
# Visualize data:
import matplotlib.pyplot as plt
%matplotlib inline

i = np.random.randint(len(test_dataset))
data = test_dataset[i,:]
pixels = np.reshape(data, (image_height, image_width))
plt.imshow(pixels)

feed_me = np.ndarray((1, image_height * image_width), np.float32)
feed_me[0] = data
feed_dict = {input_data: feed_me}
prediction = session.run(label_prediction, feed_dict=feed_dict)
print("Predicted character: " + chr(prediction + ord('A')))
print("Actual label: " + chr(np.argmax(test_labels[i]) + ord('A')))


Predicted character: G
Actual label: G

In [ ]:
test_labels[3]

In [ ]:


In [ ]: