In [1]:
import os
import tensorflow as tf
import tensorflow.python.platform
from tensorflow.python.platform import gfile
import numpy as np
import glob

# Class labels for the 13 food categories; array order defines the one-hot
# index used for every label in this notebook.
classes = np.array(['ayam_bakar', 'ayam_crispy', 'bakso', 'gado2', 'ikan_bakar', 'mie_goreng', 'nasi_goreng', 'pecel_lele', 'pizza', 'rendang', 'sate', 'soto', 'sushi'])
num_classes = len(classes)
# Plain-list copy of `classes` so list.index() can be used for label lookup.
temp = classes.tolist()
 
def create_graph(model_path):
    """
    Load the serialized Inception model into the default TensorFlow graph.
    Must be called before extract_features.

    model_path: path to the Inception model in protobuf (.pb) form.
    """
    with gfile.FastGFile(model_path, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        # Import with an empty name scope so tensor names such as
        # 'pool_3:0' keep their original, unprefixed names.
        _ = tf.import_graph_def(graph_def, name='')
    # Parenthesized print is valid in both Python 2 and 3 (the original
    # `print 'selesai'` is Python-2 only); printed after the file is closed.
    print('selesai')
 
 
def extract_features(image_paths, verbose=False):
    """
    extract_features computed the inception bottleneck feature for a list of images
 
    image_paths: array of image path
    return: 2-d array in the shape of (len(image_paths), 2048)
    """
    features = []
    cls = []
    
    with tf.Session() as sess:
        flattened_tensor = sess.graph.get_tensor_by_name('pool_3:0')
 
        for fld in classes:
            index = temp.index(fld)
            path = os.path.join(image_paths, fld, '*.jpg')
            files = glob.glob(path)
            
            for fl in files:
                label = np.zeros(len(classes))
                label[index] = 1.0
                print '{0}\r'.format(fl),
                image_data = gfile.FastGFile(fl, 'rb').read()
                feature = sess.run(flattened_tensor, {
                    'DecodeJpeg/contents:0': image_data
                })
                features.append(feature)
                cls.append(label)
                
    return features, cls

In [2]:
# Path to the pre-trained Inception protobuf; load it into the default graph.
path = 'tensorflow_inception_graph.pb'
create_graph(path)


selesai

In [3]:
# Extract bottleneck features for the training set (one folder per class).
image_paths = 'data_training/'
images, cls = extract_features(image_paths, verbose=False)
print "selesai"


selesaiaining/sushi/sushi (43).jpgjpg.jpgpg

In [4]:
# Extract bottleneck features for the held-out test set.
image_paths = 'data_testing/'
images_test, cls_test = extract_features(image_paths, verbose=False)
print "selesai"


selesaisting/sushi/sushi (22).jpgpy).jpg

In [5]:
# Sanity-check dataset sizes (output below: 5330 training / 300 test samples).
print len(images)
print len(images_test)
num_sample = len(images)
num_test = len(images_test)


5330
300

In [6]:
# Visualize the 2048-d transfer values of one hard-coded sample (index 700)
# as a 32x64 heatmap.
img = np.asarray(images[700])
print img.shape
img = img.reshape((32,64))

# NOTE(review): matplotlib is imported mid-notebook here; conventionally
# all imports belong in the first code cell.
import matplotlib.pyplot as plt
# Plot the image for the transfer-values.
plt.imshow(img, interpolation='nearest', cmap='Blues')
plt.show()


(1, 1, 1, 2048)

In [7]:
# Stack the per-image (1, 1, 1, 2048) features into flat 2-d matrices:
# (num_sample, 2048) for training and (num_test, 2048) for testing.
transfer_values = np.asarray(images)
transfer_values = transfer_values.reshape((num_sample,2048))
print transfer_values.shape

transfer_values_test = np.asarray(images_test)
transfer_values_test = transfer_values_test.reshape((num_test,2048))
print transfer_values_test.shape


(5330, 2048)
(300, 2048)

In [8]:
# Convert the one-hot label lists into integer numpy arrays of shape (N, 13).
cls = np.asarray(cls)
cls = cls.astype(int)
print cls.shape

cls_test = np.asarray(cls_test)
cls_test = cls_test.astype(int)
print cls_test.shape


(5330, 13)
(300, 13)

In [9]:
print cls


[[1 0 0 ..., 0 0 0]
 [1 0 0 ..., 0 0 0]
 [1 0 0 ..., 0 0 0]
 ..., 
 [0 0 0 ..., 0 0 1]
 [0 0 0 ..., 0 0 1]
 [0 0 0 ..., 0 0 1]]

In [12]:
# Collapse each one-hot label row into its integer class index (the column
# whose value is 1).
cls2 = [col for row in cls for col, flag in enumerate(row) if flag == 1]

In [13]:
# Project the 2048-d transfer values down to 2-d with PCA for plotting.
# NOTE(review): import placed mid-notebook; conventionally belongs at the top.
from sklearn.decomposition import PCA
pca = PCA(n_components=2)
transfer_values_reduced = pca.fit_transform(transfer_values)

In [14]:
def plot_scatter(values, cls):
    """
    Scatter-plot 2-d embedded values, one color per integer class label.

    values: array of shape (N, 2) — the reduced transfer values.
    cls: array/list of N integer class indices used to pick colors.
    """
    import matplotlib.cm as cm

    # One distinct rainbow color per class, evenly spaced on [0, 1].
    class_colors = cm.rainbow(np.linspace(0.0, 1.0, num_classes))

    # Look up each sample's color through its class index.
    sample_colors = class_colors[cls]

    # Draw the x/y columns of `values` with the per-sample colors.
    plt.scatter(values[:, 0], values[:, 1], color=sample_colors)
    plt.show()

In [15]:
# Number of input features per sample (2048 Inception bottleneck values).
num_features = len(transfer_values[0])
plot_scatter(transfer_values_reduced, cls2)



In [17]:
# Reduce to 50 dimensions with PCA first (t-SNE directly on 2048-d is slow),
# then embed into 2-d with t-SNE for a second visualization.
from sklearn.manifold import TSNE

pca = PCA(n_components=50)
transfer_values_50d = pca.fit_transform(transfer_values)
tsne = TSNE(n_components=2)
transfer_values_reduced = tsne.fit_transform(transfer_values_50d)
plot_scatter(transfer_values_reduced, cls2)



In [22]:
def new_weights(shape):
    """Create a weight Variable initialized from a truncated normal (stddev 0.05)."""
    initial = tf.truncated_normal(shape, stddev=0.05)
    return tf.Variable(initial)
def new_biases(length):
    """Create a bias Variable of the given length, initialized to the constant 0.05."""
    initial = tf.constant(0.05, shape=[length])
    return tf.Variable(initial)

def new_fc_layer(input,          # The previous layer.
                 num_inputs,     # Num. inputs from prev. layer.
                 num_outputs,    # Num. outputs.
                 use_relu=True): # Use Rectified Linear Unit (ReLU)?
    """
    Build a fully-connected layer: input @ weights + biases, optionally
    followed by a ReLU non-linearity.
    """
    # Fresh trainable parameters for this layer.
    weights = new_weights(shape=[num_inputs, num_outputs])
    biases = new_biases(length=num_outputs)

    # Affine transform of the previous layer's output.
    pre_activation = tf.matmul(input, weights) + biases

    # The final logits layer is built with use_relu=False; hidden layers
    # apply the non-linearity.
    return tf.nn.relu(pre_activation) if use_relu else pre_activation

# Size of the flattened input feature vector (2048 bottleneck values).
feature_size_flat = num_features
# Placeholders for the transfer values and their one-hot labels.
x = tf.placeholder(tf.float32, shape=[None, feature_size_flat], name='x')
y_true = tf.placeholder(tf.float32, shape=[None, num_classes], name='y_true')
y_true_cls = tf.argmax(y_true, axis=1)


# Two fully-connected layers: 2048 -> 1000 (ReLU) -> 13 (raw logits).
layer_fc1 = new_fc_layer(input= x,
                         num_inputs=num_features,
                         num_outputs=1000,
                         use_relu=True)
layer_fc2 = new_fc_layer(input=layer_fc1,
                         num_inputs=1000,
                         num_outputs=num_classes,
                         use_relu=False)
# Softmax only for prediction; the loss below consumes the raw logits.
y_pred = tf.nn.softmax(layer_fc2)
y_pred_cls = tf.argmax(y_pred, axis=1)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=layer_fc2,
                                                        labels=y_true)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
# Fraction of samples whose argmax prediction matches the true class.
correct_prediction = tf.equal(y_pred_cls, y_true_cls)
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

In [23]:
# Accuracy histories for the plotting cells below.
# NOTE(review): as written, optimize() never appends to these lists, so the
# later accuracy plots render empty — confirm whether append code was removed.
train_acc = []
test_acc = []

In [28]:
import time
# Saver and checkpoint path for persisting the best model during training.
saver = tf.train.Saver()
save_path = 'the_model2/model2'
# Fresh session with all variables (re)initialized; uncomment the restore
# line instead to resume from the saved checkpoint.
session = tf.Session()
session.run(tf.global_variables_initializer())
#saver.restore(sess=session, save_path=save_path)


def optimize(num_iterations):
    """
    Run full-batch gradient descent for num_iterations steps, checkpointing
    the model whenever the validation metrics improve, and printing one
    progress line per iteration ('*' = checkpointed, '-' = no improvement).

    num_iterations: number of optimizer steps to run.
    """
    # Ensure we update the global variable rather than a local copy.
    global total_iterations
    total_iterations = 0

    # Fix: `timedelta` was used below but never imported anywhere in the
    # notebook, so the final elapsed-time print raised a NameError.
    from datetime import timedelta

    # Start-time used for printing time-usage below.
    start_time = time.time()

    best_validation_accuracy = 0.0
    best_training_accuracy = 0.0
    best_loss = 1000
    last_improvement = 0

    msg = ("Optimization Iteration: {0:>6}, Training Accuracy: {1:>6.2%}, "
           "Validation Accuracy: {2:>6.2%}, Validation loss: {3:>6.2%} {4}")

    # Feed dicts are loop-invariant (full-batch training); build them once.
    feed_dict_train = {x: transfer_values,
                       y_true: cls}
    feed_dict_validate = {x: transfer_values_test,
                          y_true: cls_test}

    for i in range(num_iterations):
        # One full-batch optimizer step on the training data.
        session.run(optimizer, feed_dict=feed_dict_train)

        acc = session.run(accuracy, feed_dict=feed_dict_train)
        acc_val = session.run(accuracy, feed_dict=feed_dict_validate)
        val_loss = session.run(cost, feed_dict=feed_dict_validate)

        # Fix: record the history so the plotting cells below have data
        # (train_acc/test_acc were previously never filled).
        train_acc.append(acc)
        test_acc.append(acc_val)

        # Checkpoint when validation accuracy improves outright, or ties
        # while training accuracy / validation loss improve.
        improved = ((acc_val > best_validation_accuracy) or
                    (acc_val >= best_validation_accuracy and
                     acc >= best_training_accuracy and val_loss < best_loss) or
                    (acc_val >= best_validation_accuracy and
                     acc > best_training_accuracy and val_loss <= best_loss))

        if improved:
            best_validation_accuracy = acc_val
            best_training_accuracy = acc
            best_loss = val_loss
            last_improvement = total_iterations
            saver.save(sess=session, save_path=save_path)
            sign = '*'
        else:
            sign = '-'

        # Single print for both branches (the original duplicated it).
        print(msg.format(i, acc, acc_val, val_loss, sign))

        # Fix: advance the global step count — the original never updated
        # it, so last_improvement was always recorded as 0.
        total_iterations += 1

    # Ending time and elapsed duration.
    end_time = time.time()
    time_dif = end_time - start_time

    # Print the time-usage.
    print("Time elapsed: " + str(timedelta(seconds=int(round(time_dif)))))

In [ ]:
optimize(num_iterations=5000)


Optimization Iteration:      0, Training Accuracy:  9.25%, Validation Accuracy:  8.33%, Validation loss: 260.87% *
Optimization Iteration:      1, Training Accuracy: 13.88%, Validation Accuracy: 11.33%, Validation loss: 247.70% *
Optimization Iteration:      2, Training Accuracy: 21.50%, Validation Accuracy: 17.67%, Validation loss: 237.75% *
Optimization Iteration:      3, Training Accuracy: 26.23%, Validation Accuracy: 23.33%, Validation loss: 229.12% *
Optimization Iteration:      4, Training Accuracy: 29.87%, Validation Accuracy: 26.00%, Validation loss: 220.19% *
Optimization Iteration:      5, Training Accuracy: 33.56%, Validation Accuracy: 28.33%, Validation loss: 210.34% *
Optimization Iteration:      6, Training Accuracy: 38.44%, Validation Accuracy: 32.33%, Validation loss: 199.92% *
Optimization Iteration:      7, Training Accuracy: 44.09%, Validation Accuracy: 39.00%, Validation loss: 189.63% *
Optimization Iteration:      8, Training Accuracy: 49.32%, Validation Accuracy: 46.33%, Validation loss: 179.83% *
Optimization Iteration:      9, Training Accuracy: 53.81%, Validation Accuracy: 52.33%, Validation loss: 170.75% *
Optimization Iteration:     10, Training Accuracy: 57.92%, Validation Accuracy: 55.33%, Validation loss: 162.36% *
Optimization Iteration:     11, Training Accuracy: 61.41%, Validation Accuracy: 59.67%, Validation loss: 154.51% *
Optimization Iteration:     12, Training Accuracy: 64.41%, Validation Accuracy: 62.67%, Validation loss: 147.08% *
Optimization Iteration:     13, Training Accuracy: 66.51%, Validation Accuracy: 65.33%, Validation loss: 139.99% *
Optimization Iteration:     14, Training Accuracy: 68.52%, Validation Accuracy: 67.00%, Validation loss: 133.22% *
Optimization Iteration:     15, Training Accuracy: 69.85%, Validation Accuracy: 70.00%, Validation loss: 126.79% *
Optimization Iteration:     16, Training Accuracy: 71.46%, Validation Accuracy: 74.67%, Validation loss: 120.76% *
Optimization Iteration:     17, Training Accuracy: 72.25%, Validation Accuracy: 77.33%, Validation loss: 115.12% *
Optimization Iteration:     18, Training Accuracy: 72.89%, Validation Accuracy: 77.67%, Validation loss: 109.90% *
Optimization Iteration:     19, Training Accuracy: 73.70%, Validation Accuracy: 77.67%, Validation loss: 105.06% *
Optimization Iteration:     20, Training Accuracy: 74.37%, Validation Accuracy: 77.33%, Validation loss: 100.55% -
Optimization Iteration:     21, Training Accuracy: 75.18%, Validation Accuracy: 78.67%, Validation loss: 96.29% *
Optimization Iteration:     22, Training Accuracy: 75.67%, Validation Accuracy: 78.67%, Validation loss: 92.19% *
Optimization Iteration:     23, Training Accuracy: 76.53%, Validation Accuracy: 80.33%, Validation loss: 88.27% *
Optimization Iteration:     24, Training Accuracy: 77.45%, Validation Accuracy: 81.33%, Validation loss: 84.54% *
Optimization Iteration:     25, Training Accuracy: 78.03%, Validation Accuracy: 80.67%, Validation loss: 81.02% -
Optimization Iteration:     26, Training Accuracy: 78.41%, Validation Accuracy: 81.33%, Validation loss: 77.73% -
Optimization Iteration:     27, Training Accuracy: 78.69%, Validation Accuracy: 82.67%, Validation loss: 74.66% *
Optimization Iteration:     28, Training Accuracy: 79.19%, Validation Accuracy: 85.00%, Validation loss: 71.76% *
Optimization Iteration:     29, Training Accuracy: 79.89%, Validation Accuracy: 85.33%, Validation loss: 69.02% *
Optimization Iteration:     30, Training Accuracy: 80.02%, Validation Accuracy: 85.67%, Validation loss: 66.45% *
Optimization Iteration:     31, Training Accuracy: 80.49%, Validation Accuracy: 87.00%, Validation loss: 64.03% *
Optimization Iteration:     32, Training Accuracy: 80.54%, Validation Accuracy: 88.00%, Validation loss: 61.78% *
Optimization Iteration:     33, Training Accuracy: 80.77%, Validation Accuracy: 88.00%, Validation loss: 59.70% *
Optimization Iteration:     34, Training Accuracy: 81.11%, Validation Accuracy: 88.67%, Validation loss: 57.78% *
Optimization Iteration:     35, Training Accuracy: 81.46%, Validation Accuracy: 89.33%, Validation loss: 55.99% *
Optimization Iteration:     36, Training Accuracy: 81.84%, Validation Accuracy: 89.67%, Validation loss: 54.31% *
Optimization Iteration:     37, Training Accuracy: 81.91%, Validation Accuracy: 90.33%, Validation loss: 52.71% *
Optimization Iteration:     38, Training Accuracy: 82.21%, Validation Accuracy: 90.33%, Validation loss: 51.19% *
Optimization Iteration:     39, Training Accuracy: 82.74%, Validation Accuracy: 90.33%, Validation loss: 49.75% *
Optimization Iteration:     40, Training Accuracy: 82.83%, Validation Accuracy: 90.33%, Validation loss: 48.39% *
Optimization Iteration:     41, Training Accuracy: 83.21%, Validation Accuracy: 90.33%, Validation loss: 47.11% *
Optimization Iteration:     42, Training Accuracy: 83.45%, Validation Accuracy: 90.67%, Validation loss: 45.90% *
Optimization Iteration:     43, Training Accuracy: 83.55%, Validation Accuracy: 91.00%, Validation loss: 44.76% *
Optimization Iteration:     44, Training Accuracy: 83.88%, Validation Accuracy: 91.00%, Validation loss: 43.68% *
Optimization Iteration:     45, Training Accuracy: 84.02%, Validation Accuracy: 91.00%, Validation loss: 42.66% *
Optimization Iteration:     46, Training Accuracy: 84.17%, Validation Accuracy: 91.00%, Validation loss: 41.72% *
Optimization Iteration:     47, Training Accuracy: 84.33%, Validation Accuracy: 91.00%, Validation loss: 40.84% *
Optimization Iteration:     48, Training Accuracy: 84.65%, Validation Accuracy: 91.33%, Validation loss: 40.02% *
Optimization Iteration:     49, Training Accuracy: 84.75%, Validation Accuracy: 91.33%, Validation loss: 39.25% *
Optimization Iteration:     50, Training Accuracy: 85.05%, Validation Accuracy: 91.33%, Validation loss: 38.52% *
Optimization Iteration:     51, Training Accuracy: 85.29%, Validation Accuracy: 91.33%, Validation loss: 37.84% *
Optimization Iteration:     52, Training Accuracy: 85.65%, Validation Accuracy: 91.33%, Validation loss: 37.18% *
Optimization Iteration:     53, Training Accuracy: 85.72%, Validation Accuracy: 91.33%, Validation loss: 36.57% *
Optimization Iteration:     54, Training Accuracy: 85.87%, Validation Accuracy: 91.33%, Validation loss: 35.98% *
Optimization Iteration:     55, Training Accuracy: 86.08%, Validation Accuracy: 91.67%, Validation loss: 35.42% *
Optimization Iteration:     56, Training Accuracy: 86.27%, Validation Accuracy: 91.67%, Validation loss: 34.89% *
Optimization Iteration:     57, Training Accuracy: 86.42%, Validation Accuracy: 91.67%, Validation loss: 34.38% *
Optimization Iteration:     58, Training Accuracy: 86.70%, Validation Accuracy: 91.67%, Validation loss: 33.90% *
Optimization Iteration:     59, Training Accuracy: 86.85%, Validation Accuracy: 91.67%, Validation loss: 33.43% *
Optimization Iteration:     60, Training Accuracy: 87.02%, Validation Accuracy: 91.67%, Validation loss: 32.97% *
Optimization Iteration:     61, Training Accuracy: 87.20%, Validation Accuracy: 91.67%, Validation loss: 32.53% *
Optimization Iteration:     62, Training Accuracy: 87.32%, Validation Accuracy: 91.67%, Validation loss: 32.12% *
Optimization Iteration:     63, Training Accuracy: 87.52%, Validation Accuracy: 91.67%, Validation loss: 31.73% *
Optimization Iteration:     64, Training Accuracy: 87.58%, Validation Accuracy: 92.00%, Validation loss: 31.36% *
Optimization Iteration:     65, Training Accuracy: 87.80%, Validation Accuracy: 91.33%, Validation loss: 31.01% -
Optimization Iteration:     66, Training Accuracy: 87.99%, Validation Accuracy: 91.33%, Validation loss: 30.68% -
Optimization Iteration:     67, Training Accuracy: 88.22%, Validation Accuracy: 91.33%, Validation loss: 30.36% -
Optimization Iteration:     68, Training Accuracy: 88.37%, Validation Accuracy: 91.33%, Validation loss: 30.05% -
Optimization Iteration:     69, Training Accuracy: 88.46%, Validation Accuracy: 91.33%, Validation loss: 29.75% -
Optimization Iteration:     70, Training Accuracy: 88.65%, Validation Accuracy: 91.67%, Validation loss: 29.45% -
Optimization Iteration:     71, Training Accuracy: 88.80%, Validation Accuracy: 92.00%, Validation loss: 29.17% *
Optimization Iteration:     72, Training Accuracy: 88.91%, Validation Accuracy: 92.00%, Validation loss: 28.89% *
Optimization Iteration:     73, Training Accuracy: 89.02%, Validation Accuracy: 92.00%, Validation loss: 28.62% *
Optimization Iteration:     74, Training Accuracy: 89.08%, Validation Accuracy: 92.00%, Validation loss: 28.35% *
Optimization Iteration:     75, Training Accuracy: 89.19%, Validation Accuracy: 92.00%, Validation loss: 28.10% *
Optimization Iteration:     76, Training Accuracy: 89.36%, Validation Accuracy: 92.00%, Validation loss: 27.86% *
Optimization Iteration:     77, Training Accuracy: 89.44%, Validation Accuracy: 92.00%, Validation loss: 27.62% *
Optimization Iteration:     78, Training Accuracy: 89.62%, Validation Accuracy: 91.67%, Validation loss: 27.40% -
Optimization Iteration:     79, Training Accuracy: 89.74%, Validation Accuracy: 91.67%, Validation loss: 27.18% -
Optimization Iteration:     80, Training Accuracy: 89.77%, Validation Accuracy: 91.67%, Validation loss: 26.97% -
Optimization Iteration:     81, Training Accuracy: 89.91%, Validation Accuracy: 91.67%, Validation loss: 26.77% -
Optimization Iteration:     82, Training Accuracy: 90.02%, Validation Accuracy: 92.00%, Validation loss: 26.57% *
Optimization Iteration:     83, Training Accuracy: 90.08%, Validation Accuracy: 92.00%, Validation loss: 26.37% *
Optimization Iteration:     84, Training Accuracy: 90.19%, Validation Accuracy: 92.00%, Validation loss: 26.17% *
Optimization Iteration:     85, Training Accuracy: 90.34%, Validation Accuracy: 92.00%, Validation loss: 25.97% *
Optimization Iteration:     86, Training Accuracy: 90.47%, Validation Accuracy: 92.00%, Validation loss: 25.77% *
Optimization Iteration:     87, Training Accuracy: 90.56%, Validation Accuracy: 92.00%, Validation loss: 25.58% *
Optimization Iteration:     88, Training Accuracy: 90.66%, Validation Accuracy: 92.00%, Validation loss: 25.40% *
Optimization Iteration:     89, Training Accuracy: 90.75%, Validation Accuracy: 92.00%, Validation loss: 25.22% *
Optimization Iteration:     90, Training Accuracy: 90.83%, Validation Accuracy: 92.00%, Validation loss: 25.05% *
Optimization Iteration:     91, Training Accuracy: 90.88%, Validation Accuracy: 92.00%, Validation loss: 24.89% *
Optimization Iteration:     92, Training Accuracy: 91.07%, Validation Accuracy: 92.00%, Validation loss: 24.73% *
Optimization Iteration:     93, Training Accuracy: 91.16%, Validation Accuracy: 92.00%, Validation loss: 24.58% *
Optimization Iteration:     94, Training Accuracy: 91.26%, Validation Accuracy: 92.33%, Validation loss: 24.43% *
Optimization Iteration:     95, Training Accuracy: 91.33%, Validation Accuracy: 92.33%, Validation loss: 24.28% *
Optimization Iteration:     96, Training Accuracy: 91.33%, Validation Accuracy: 92.33%, Validation loss: 24.13% *
Optimization Iteration:     97, Training Accuracy: 91.44%, Validation Accuracy: 92.33%, Validation loss: 23.99% *
Optimization Iteration:     98, Training Accuracy: 91.48%, Validation Accuracy: 92.33%, Validation loss: 23.85% *
Optimization Iteration:     99, Training Accuracy: 91.63%, Validation Accuracy: 92.33%, Validation loss: 23.71% *
Optimization Iteration:    100, Training Accuracy: 91.69%, Validation Accuracy: 92.33%, Validation loss: 23.57% *
Optimization Iteration:    101, Training Accuracy: 91.78%, Validation Accuracy: 92.33%, Validation loss: 23.44% *
Optimization Iteration:    102, Training Accuracy: 91.88%, Validation Accuracy: 92.33%, Validation loss: 23.31% *
Optimization Iteration:    103, Training Accuracy: 91.97%, Validation Accuracy: 92.33%, Validation loss: 23.19% *
Optimization Iteration:    104, Training Accuracy: 92.03%, Validation Accuracy: 92.33%, Validation loss: 23.07% *
Optimization Iteration:    105, Training Accuracy: 92.05%, Validation Accuracy: 92.33%, Validation loss: 22.95% *
Optimization Iteration:    106, Training Accuracy: 92.16%, Validation Accuracy: 92.33%, Validation loss: 22.83% *
Optimization Iteration:    107, Training Accuracy: 92.23%, Validation Accuracy: 92.33%, Validation loss: 22.71% *
Optimization Iteration:    108, Training Accuracy: 92.25%, Validation Accuracy: 92.33%, Validation loss: 22.60% *
Optimization Iteration:    109, Training Accuracy: 92.35%, Validation Accuracy: 92.33%, Validation loss: 22.48% *
Optimization Iteration:    110, Training Accuracy: 92.40%, Validation Accuracy: 92.33%, Validation loss: 22.36% *
Optimization Iteration:    111, Training Accuracy: 92.46%, Validation Accuracy: 92.33%, Validation loss: 22.24% *
Optimization Iteration:    112, Training Accuracy: 92.51%, Validation Accuracy: 92.33%, Validation loss: 22.13% *
Optimization Iteration:    113, Training Accuracy: 92.61%, Validation Accuracy: 92.67%, Validation loss: 22.03% *
Optimization Iteration:    114, Training Accuracy: 92.68%, Validation Accuracy: 92.67%, Validation loss: 21.92% *
Optimization Iteration:    115, Training Accuracy: 92.74%, Validation Accuracy: 92.67%, Validation loss: 21.82% *
Optimization Iteration:    116, Training Accuracy: 92.80%, Validation Accuracy: 92.67%, Validation loss: 21.72% *
Optimization Iteration:    117, Training Accuracy: 92.83%, Validation Accuracy: 92.67%, Validation loss: 21.62% *
Optimization Iteration:    118, Training Accuracy: 92.89%, Validation Accuracy: 92.67%, Validation loss: 21.52% *
Optimization Iteration:    119, Training Accuracy: 92.95%, Validation Accuracy: 92.67%, Validation loss: 21.43% *
Optimization Iteration:    120, Training Accuracy: 92.98%, Validation Accuracy: 92.67%, Validation loss: 21.34% *
Optimization Iteration:    121, Training Accuracy: 93.00%, Validation Accuracy: 92.67%, Validation loss: 21.25% *
Optimization Iteration:    122, Training Accuracy: 93.04%, Validation Accuracy: 92.67%, Validation loss: 21.16% *
Optimization Iteration:    123, Training Accuracy: 93.15%, Validation Accuracy: 92.67%, Validation loss: 21.07% *
Optimization Iteration:    124, Training Accuracy: 93.19%, Validation Accuracy: 92.67%, Validation loss: 20.98% *
Optimization Iteration:    125, Training Accuracy: 93.21%, Validation Accuracy: 92.67%, Validation loss: 20.89% *
Optimization Iteration:    126, Training Accuracy: 93.25%, Validation Accuracy: 92.67%, Validation loss: 20.80% *
Optimization Iteration:    127, Training Accuracy: 93.26%, Validation Accuracy: 92.67%, Validation loss: 20.72% *
Optimization Iteration:    128, Training Accuracy: 93.30%, Validation Accuracy: 92.67%, Validation loss: 20.64% *
Optimization Iteration:    129, Training Accuracy: 93.36%, Validation Accuracy: 92.67%, Validation loss: 20.56% *
Optimization Iteration:    130, Training Accuracy: 93.43%, Validation Accuracy: 92.67%, Validation loss: 20.48% *
Optimization Iteration:    131, Training Accuracy: 93.45%, Validation Accuracy: 92.67%, Validation loss: 20.40% *
Optimization Iteration:    132, Training Accuracy: 93.49%, Validation Accuracy: 92.67%, Validation loss: 20.32% *
Optimization Iteration:    133, Training Accuracy: 93.58%, Validation Accuracy: 92.67%, Validation loss: 20.24% *
Optimization Iteration:    134, Training Accuracy: 93.68%, Validation Accuracy: 92.67%, Validation loss: 20.16% *
Optimization Iteration:    135, Training Accuracy: 93.77%, Validation Accuracy: 92.67%, Validation loss: 20.09% *
Optimization Iteration:    136, Training Accuracy: 93.81%, Validation Accuracy: 92.67%, Validation loss: 20.01% *
Optimization Iteration:    137, Training Accuracy: 93.85%, Validation Accuracy: 92.67%, Validation loss: 19.94% *
Optimization Iteration:    138, Training Accuracy: 93.94%, Validation Accuracy: 92.67%, Validation loss: 19.87% *
Optimization Iteration:    139, Training Accuracy: 93.98%, Validation Accuracy: 92.67%, Validation loss: 19.80% *
Optimization Iteration:    140, Training Accuracy: 94.03%, Validation Accuracy: 92.67%, Validation loss: 19.72% *
Optimization Iteration:    141, Training Accuracy: 94.13%, Validation Accuracy: 92.67%, Validation loss: 19.65% *
Optimization Iteration:    142, Training Accuracy: 94.22%, Validation Accuracy: 92.67%, Validation loss: 19.58% *
Optimization Iteration:    143, Training Accuracy: 94.32%, Validation Accuracy: 92.67%, Validation loss: 19.51% *
Optimization Iteration:    144, Training Accuracy: 94.35%, Validation Accuracy: 92.67%, Validation loss: 19.44% *
Optimization Iteration:    145, Training Accuracy: 94.43%, Validation Accuracy: 92.67%, Validation loss: 19.38% *
Optimization Iteration:    146, Training Accuracy: 94.48%, Validation Accuracy: 92.67%, Validation loss: 19.31% *
Optimization Iteration:    147, Training Accuracy: 94.48%, Validation Accuracy: 92.67%, Validation loss: 19.24% -
Optimization Iteration:    148, Training Accuracy: 94.54%, Validation Accuracy: 92.67%, Validation loss: 19.18% *
Optimization Iteration:    149, Training Accuracy: 94.58%, Validation Accuracy: 92.67%, Validation loss: 19.12% *
Optimization Iteration:    150, Training Accuracy: 94.62%, Validation Accuracy: 92.67%, Validation loss: 19.05% *
Optimization Iteration:    151, Training Accuracy: 94.63%, Validation Accuracy: 92.67%, Validation loss: 18.99% *
Optimization Iteration:    152, Training Accuracy: 94.65%, Validation Accuracy: 92.67%, Validation loss: 18.93% *
Optimization Iteration:    153, Training Accuracy: 94.69%, Validation Accuracy: 92.67%, Validation loss: 18.87% *
Optimization Iteration:    154, Training Accuracy: 94.73%, Validation Accuracy: 92.67%, Validation loss: 18.81% *
Optimization Iteration:    155, Training Accuracy: 94.84%, Validation Accuracy: 93.00%, Validation loss: 18.75% *
Optimization Iteration:    156, Training Accuracy: 94.88%, Validation Accuracy: 93.00%, Validation loss: 18.69% *
Optimization Iteration:    157, Training Accuracy: 94.95%, Validation Accuracy: 93.00%, Validation loss: 18.63% *
Optimization Iteration:    158, Training Accuracy: 95.07%, Validation Accuracy: 93.00%, Validation loss: 18.57% *
Optimization Iteration:    159, Training Accuracy: 95.14%, Validation Accuracy: 93.00%, Validation loss: 18.51% *
Optimization Iteration:    160, Training Accuracy: 95.20%, Validation Accuracy: 93.00%, Validation loss: 18.46% *
Optimization Iteration:    161, Training Accuracy: 95.22%, Validation Accuracy: 93.33%, Validation loss: 18.41% *
Optimization Iteration:    162, Training Accuracy: 95.27%, Validation Accuracy: 93.33%, Validation loss: 18.35% *
Optimization Iteration:    163, Training Accuracy: 95.31%, Validation Accuracy: 93.33%, Validation loss: 18.30% *
Optimization Iteration:    164, Training Accuracy: 95.31%, Validation Accuracy: 93.33%, Validation loss: 18.25% *
Optimization Iteration:    165, Training Accuracy: 95.33%, Validation Accuracy: 93.33%, Validation loss: 18.20% *
Optimization Iteration:    166, Training Accuracy: 95.35%, Validation Accuracy: 93.33%, Validation loss: 18.15% *
Optimization Iteration:    167, Training Accuracy: 95.38%, Validation Accuracy: 93.33%, Validation loss: 18.10% *
Optimization Iteration:    168, Training Accuracy: 95.40%, Validation Accuracy: 93.33%, Validation loss: 18.05% *
Optimization Iteration:    169, Training Accuracy: 95.44%, Validation Accuracy: 93.33%, Validation loss: 18.00% *
Optimization Iteration:    170, Training Accuracy: 95.44%, Validation Accuracy: 93.33%, Validation loss: 17.96% *
Optimization Iteration:    171, Training Accuracy: 95.46%, Validation Accuracy: 93.33%, Validation loss: 17.91% *
Optimization Iteration:    172, Training Accuracy: 95.53%, Validation Accuracy: 93.33%, Validation loss: 17.86% *
Optimization Iteration:    173, Training Accuracy: 95.61%, Validation Accuracy: 93.67%, Validation loss: 17.82% *
Optimization Iteration:    174, Training Accuracy: 95.65%, Validation Accuracy: 93.67%, Validation loss: 17.78% *
Optimization Iteration:    175, Training Accuracy: 95.72%, Validation Accuracy: 93.67%, Validation loss: 17.74% *
Optimization Iteration:    176, Training Accuracy: 95.74%, Validation Accuracy: 93.67%, Validation loss: 17.69% *
Optimization Iteration:    177, Training Accuracy: 95.78%, Validation Accuracy: 93.67%, Validation loss: 17.65% *
Optimization Iteration:    178, Training Accuracy: 95.85%, Validation Accuracy: 93.67%, Validation loss: 17.61% *
Optimization Iteration:    179, Training Accuracy: 95.91%, Validation Accuracy: 93.67%, Validation loss: 17.57% *
Optimization Iteration:    180, Training Accuracy: 95.93%, Validation Accuracy: 93.67%, Validation loss: 17.53% *
Optimization Iteration:    181, Training Accuracy: 95.93%, Validation Accuracy: 93.67%, Validation loss: 17.49% *
Optimization Iteration:    182, Training Accuracy: 95.98%, Validation Accuracy: 93.67%, Validation loss: 17.45% *
Optimization Iteration:    183, Training Accuracy: 96.00%, Validation Accuracy: 93.67%, Validation loss: 17.41% *
Optimization Iteration:    184, Training Accuracy: 96.04%, Validation Accuracy: 93.67%, Validation loss: 17.37% *
Optimization Iteration:    185, Training Accuracy: 96.06%, Validation Accuracy: 93.67%, Validation loss: 17.33% *
Optimization Iteration:    186, Training Accuracy: 96.14%, Validation Accuracy: 93.67%, Validation loss: 17.29% *
Optimization Iteration:    187, Training Accuracy: 96.19%, Validation Accuracy: 93.67%, Validation loss: 17.26% *
Optimization Iteration:    188, Training Accuracy: 96.21%, Validation Accuracy: 93.67%, Validation loss: 17.22% *
Optimization Iteration:    189, Training Accuracy: 96.23%, Validation Accuracy: 93.33%, Validation loss: 17.19% -
Optimization Iteration:    190, Training Accuracy: 96.27%, Validation Accuracy: 93.33%, Validation loss: 17.15% -
Optimization Iteration:    191, Training Accuracy: 96.29%, Validation Accuracy: 93.33%, Validation loss: 17.12% -
Optimization Iteration:    192, Training Accuracy: 96.34%, Validation Accuracy: 93.33%, Validation loss: 17.08% -
Optimization Iteration:    193, Training Accuracy: 96.38%, Validation Accuracy: 93.33%, Validation loss: 17.05% -
Optimization Iteration:    194, Training Accuracy: 96.40%, Validation Accuracy: 93.33%, Validation loss: 17.01% -
Optimization Iteration:    195, Training Accuracy: 96.45%, Validation Accuracy: 93.33%, Validation loss: 16.98% -
Optimization Iteration:    196, Training Accuracy: 96.51%, Validation Accuracy: 93.33%, Validation loss: 16.95% -
Optimization Iteration:    197, Training Accuracy: 96.51%, Validation Accuracy: 93.33%, Validation loss: 16.92% -
Optimization Iteration:    198, Training Accuracy: 96.57%, Validation Accuracy: 93.33%, Validation loss: 16.88% -
Optimization Iteration:    199, Training Accuracy: 96.59%, Validation Accuracy: 93.33%, Validation loss: 16.85% -
Optimization Iteration:    200, Training Accuracy: 96.62%, Validation Accuracy: 93.33%, Validation loss: 16.82% -
Optimization Iteration:    201, Training Accuracy: 96.68%, Validation Accuracy: 93.33%, Validation loss: 16.79% -
Optimization Iteration:    202, Training Accuracy: 96.70%, Validation Accuracy: 93.33%, Validation loss: 16.76% -
Optimization Iteration:    203, Training Accuracy: 96.70%, Validation Accuracy: 93.67%, Validation loss: 16.73% *
Optimization Iteration:    204, Training Accuracy: 96.74%, Validation Accuracy: 93.67%, Validation loss: 16.70% *
Optimization Iteration:    205, Training Accuracy: 96.81%, Validation Accuracy: 93.67%, Validation loss: 16.67% *
Optimization Iteration:    206, Training Accuracy: 96.85%, Validation Accuracy: 93.67%, Validation loss: 16.64% *
Optimization Iteration:    207, Training Accuracy: 96.85%, Validation Accuracy: 93.67%, Validation loss: 16.61% -
Optimization Iteration:    208, Training Accuracy: 96.87%, Validation Accuracy: 93.67%, Validation loss: 16.58% *
Optimization Iteration:    209, Training Accuracy: 96.94%, Validation Accuracy: 93.67%, Validation loss: 16.56% *
Optimization Iteration:    210, Training Accuracy: 96.96%, Validation Accuracy: 93.67%, Validation loss: 16.53% *
Optimization Iteration:    211, Training Accuracy: 97.00%, Validation Accuracy: 93.67%, Validation loss: 16.50% *
Optimization Iteration:    212, Training Accuracy: 97.00%, Validation Accuracy: 93.67%, Validation loss: 16.47% *
Optimization Iteration:    213, Training Accuracy: 97.02%, Validation Accuracy: 93.67%, Validation loss: 16.44% *
Optimization Iteration:    214, Training Accuracy: 97.07%, Validation Accuracy: 93.67%, Validation loss: 16.41% *
Optimization Iteration:    215, Training Accuracy: 97.09%, Validation Accuracy: 93.67%, Validation loss: 16.39% *
Optimization Iteration:    216, Training Accuracy: 97.11%, Validation Accuracy: 93.67%, Validation loss: 16.36% *
Optimization Iteration:    217, Training Accuracy: 97.11%, Validation Accuracy: 93.67%, Validation loss: 16.33% *
Optimization Iteration:    218, Training Accuracy: 97.15%, Validation Accuracy: 93.67%, Validation loss: 16.31% *
Optimization Iteration:    219, Training Accuracy: 97.17%, Validation Accuracy: 93.67%, Validation loss: 16.28% *
Optimization Iteration:    220, Training Accuracy: 97.19%, Validation Accuracy: 93.67%, Validation loss: 16.26% *
Optimization Iteration:    221, Training Accuracy: 97.20%, Validation Accuracy: 93.67%, Validation loss: 16.23% *

In [45]:
# Plot the training-accuracy history over the optimization iterations.
plt.plot(train_acc, color = 'blue')
# Fix: this cell plots train_acc, so the title must say "Train", not "Test".
plt.title('Train Accuracy')
plt.xlim(0, 5500)
plt.ylim(0, 1)
plt.xlabel('epoch')
plt.ylabel('accuracy (%)')
plt.show()



In [44]:
# Plot the validation/test-accuracy history over the optimization iterations.
plt.plot(test_acc, color = 'red')
plt.title('Test Accuracy')
plt.xlim(0, 5500)
plt.ylim(0, 1)
plt.xlabel('epoch')
plt.ylabel('accuracy (%)')
plt.show()



In [51]:
# Overlay the train (blue) and test (red) accuracy curves on one figure.
# Fix: the original re-selected figure 1 and re-set the axis limits twice,
# and gave no legend to tell the two curves apart.
plt.figure(1)
plt.title('Train and Test Accuracy')
plt.plot(train_acc, color='blue', label='train')
plt.plot(test_acc, color='red', label='test')
plt.xlim(0, 5500)
plt.ylim(0, 1)
plt.xlabel('epoch')
plt.ylabel('accuracy (%)')
plt.legend()

plt.show()



In [53]:
def plot_confusion_matrix(cls_pred):
    """Print and plot the confusion matrix for the validation set.

    cls_pred: array of predicted class labels for every validation image.
    Uses the true labels from data.valid.cls and sklearn's
    confusion_matrix; displays the matrix both as text and as an image.
    """
    # True labels for the validation set.
    cls_true = data.valid.cls

    # Confusion matrix: rows are true classes, columns are predictions.
    cm = confusion_matrix(y_true=cls_true, y_pred=cls_pred)

    # Text form first, so exact counts are readable.
    print(cm)

    # Image form: brighter cells mean more samples.
    plt.matshow(cm)
    plt.colorbar()

    # Label both axes with the class indices.
    ticks = np.arange(num_classes)
    plt.xticks(ticks, range(num_classes))
    plt.yticks(ticks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')

    # Flush the figure so it renders correctly alongside other plots
    # in the same notebook cell.
    plt.show()
    
def plot_example_errors(cls_pred, correct):
    """Show up to 9 misclassified validation images.

    cls_pred: array of predicted class labels for all validation images.
    correct:  boolean array, True where the prediction matched the truth.
    """
    # Select the misclassified examples.
    incorrect = (correct == False)

    # Images, predictions and true labels restricted to the errors.
    error_images = data.valid.images[incorrect]
    error_pred = cls_pred[incorrect]
    error_true = data.valid.cls[incorrect]

    # Display at most the first nine errors.
    plot_images(images=error_images[0:9],
                cls_true=error_true[0:9],
                cls_pred=error_pred[0:9])

In [59]:
def print_validation_accuracy(show_example_errors=False,
                        show_confusion_matrix=False):
    """Compute and print classification accuracy on the validation set.

    Runs the network in batches of up to 50 images, converts the predicted
    class indices to class-name labels, and prints the overall accuracy.

    show_example_errors:    if True, also plot up to 9 misclassified images.
    show_confusion_matrix:  if True, also print/plot the confusion matrix.

    NOTE(review): relies on module-level names (session, data, classes,
    num_test, img_size_flat, x, y_true, y_pred_cls) defined in earlier cells.
    """
    # Predicted class index for every validation image, filled batch by batch.
    # (np.int is removed in modern NumPy; plain int is equivalent here.)
    cls_pred = np.zeros(shape=num_test, dtype=int)

    # i is the start index of the current batch.
    i = 0

    while i < num_test:
        # j is the (exclusive) end index; the last batch may be smaller
        # than 50 when num_test is not a multiple of 50.
        j = min(i + 50, num_test)

        # BUG FIX: reshape by the actual batch length (j - i), not by the
        # fixed batch_size -- reshape(batch_size, ...) raises ValueError on
        # a final partial batch (cf. the "Cannot feed value of shape"
        # traceback below).
        images = data.valid.images[i:j, :].reshape(j - i, img_size_flat)

        # One-hot labels for the same slice.
        labels = data.valid.labels[i:j, :]

        # Feed the batch through the network.
        feed_dict = {x: images,
                     y_true: labels}
        cls_pred[i:j] = session.run(y_pred_cls, feed_dict=feed_dict)

        # Advance to the next batch.
        i = j

    # Convert predicted indices to class-name labels to match data.valid.cls.
    cls_true = np.array(data.valid.cls)
    cls_pred = np.array([classes[idx] for idx in cls_pred])

    # Boolean mask of correctly classified images.
    correct = (cls_true == cls_pred)

    # Summing a boolean array counts the True entries.
    correct_sum = correct.sum()

    # Accuracy = correct / total.
    acc = float(correct_sum) / num_test

    msg = "Accuracy on Test-Set: {0:.1%} ({1} / {2})"
    print(msg.format(acc, correct_sum, num_test))

    # Optionally visualize some misclassifications.
    if show_example_errors:
        print("Example errors:")
        plot_example_errors(cls_pred=cls_pred, correct=correct)

    # Optionally visualize the confusion matrix.
    if show_confusion_matrix:
        print("Confusion Matrix:")
        plot_confusion_matrix(cls_pred=cls_pred)


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-59-6acad1e19d72> in <module>()
      6                     y_true: y_batch_test}
      7 
----> 8     cls_pred[i] = session.run(y_pred_cls, feed_dict=feed_dict_test)

/usr/local/lib/python2.7/dist-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    765     try:
    766       result = self._run(None, fetches, feed_dict, options_ptr,
--> 767                          run_metadata_ptr)
    768       if run_metadata:
    769         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/usr/local/lib/python2.7/dist-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
    942                 'Cannot feed value of shape %r for Tensor %r, '
    943                 'which has shape %r'
--> 944                 % (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
    945           if not self.graph.is_feedable(subfeed_t):
    946             raise ValueError('Tensor %s may not be fed.' % subfeed_t)

ValueError: Cannot feed value of shape (2048,) for Tensor u'x:0', which has shape '(?, 2048)'

In [60]:
print tensor_values


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-60-6ebce9a61ff7> in <module>()
----> 1 print tensor_values

NameError: name 'tensor_values' is not defined

In [ ]: