In [1]:
# Scraping and general utilities
import urllib
from bs4 import BeautifulSoup
from selenium import webdriver
import re
import os, sys, shutil
import time
from datetime import date
try:
    import cPickle as pickle  # Python 2
except ImportError:
    import pickle             # Python 3
import pprint
from collections import deque
from shutil import copyfile
import random
import glob
import math

# Plotting
import matplotlib.pyplot as plt
%matplotlib inline

# Image processing and machine learning
import cv2
import numpy as np
from PIL import Image
from sklearn.cross_validation import KFold  # sklearn.model_selection.KFold in newer scikit-learn
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn import datasets
from sklearn import metrics
import pandas as pd
from six.moves import xrange  # pylint: disable=redefined-builtin
import tensorflow as tf

In [2]:
# Load the previously scraped LinkedIn profile metadata.
# (This load has errored out in some runs; see the workaround sketch below.)
with open("linkedin_profiles.pickle", "rb") as pkl_fl:
    my_original_list = pickle.load(pkl_fl)
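
If the load above errors out, a common cause is a pickle written under Python 2 being read under Python 3. The cell below is a minimal workaround sketch under that assumption (same file name as above); skip it if the plain load works.

In [ ]:
# Sketch: load a Python 2 pickle from Python 3 by retrying with an explicit
# encoding (assumption: that is the cause of the load error noted above).
with open("linkedin_profiles.pickle", "rb") as pkl_fl:
    try:
        my_original_list = pickle.load(pkl_fl)
    except UnicodeDecodeError:
        pkl_fl.seek(0)
        # 'latin1' maps every byte, so Python 2 str fields survive the round trip.
        my_original_list = pickle.load(pkl_fl, encoding="latin1")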

In [3]:
directory = "Male"
    
if os.path.exists(directory):
    shutil.rmtree(directory)
    os.makedirs(directory)     
else:
    os.makedirs(directory) 

directory1 = "Female"

if os.path.exists(directory1):
    shutil.rmtree(directory1)
    os.makedirs(directory1)     
else:
    os.makedirs(directory1)     

directory2 = "Label_Images_Gender"

if os.path.exists(directory2):
    shutil.rmtree(directory2)
    os.makedirs(directory2)     
else:
    os.makedirs(directory2)     
    
fileList = glob.glob("./Images/*.*")

for id,fp in enumerate(fileList):
    filename, file_extension = os.path.splitext(fp)
    uid = filename.split('/')[-1]
    #print fp
    for prof in my_original_list:
        if prof['User_ID'] == uid:
            new_file_extension = prof['Gender']
            new_file_extension = new_file_extension.title()
            if new_file_extension != 'Unknown':
                copyfile(filename+".jpg", './Label_Images_Gender/'+ uid + '.' + str(id) + "."+new_file_extension +'.jpg')
                copyfile(filename+".jpg", new_file_extension +'/' + uid + ".jpg")
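
As a quick sanity check (not part of the original pipeline), the class balance of the labelled set can be read back off the file names in Label_Images_Gender:

In [ ]:
# Sketch: count how many labelled images fell into each gender class.
from collections import Counter

label_counts = Counter(
    os.path.basename(p).split('.')[2]   # file names are uid.<index>.<Gender>.jpg
    for p in glob.glob('./Label_Images_Gender/*.jpg')
)
print(label_counts)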

In [3]:
# For face detection we will use the Haar Cascade provided by OpenCV.
cascadePath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascadePath)

# For face recognition we will use the LBPH Face Recognizer
# (cv2.createLBPHFaceRecognizer is the OpenCV 2.4 API; OpenCV 3+ moved it to cv2.face)
recognizer = cv2.createLBPHFaceRecognizer()
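
Before cropping the whole dataset, a hedged one-image check (using any file from the labelled directory created above) confirms the cascade file loaded and returns detections:

In [ ]:
# Sketch: run the cascade on a single labelled image as a smoke test.
sample_path = glob.glob('./Label_Images_Gender/*.jpg')[0]
sample_gray = np.array(Image.open(sample_path).convert('L'), 'uint8')
sample_faces = faceCascade.detectMultiScale(sample_gray)
print('Found %d face(s) in %s' % (len(sample_faces), sample_path))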

In [4]:
def get_images_and_labels(path):
    # Build the list of image paths under the labelled directory.
    image_paths = [os.path.join(path, f) for f in os.listdir(path)]
    # images will contain the cropped face images
    images = []
    # labels will contain the numeric label assigned to each image
    labels = []
    # gender will contain 1 or 0, indicating male or female
    gender = []

    # Recreate a clean directory for the cropped faces.
    directory = 'face'
    if os.path.exists(directory):
        shutil.rmtree(directory)
    os.makedirs(directory)

    for image_path in image_paths:
        # Read the image and convert it to grayscale
        try:
            image_pil = Image.open(image_path).convert('L')
            # Convert the image into a numpy array
            image = np.array(image_pil, 'uint8')
        except IOError:
            # Skip unreadable files rather than reusing a stale image
            continue

        # Recover the label and gender from the filename: uid.<label>.<Gender>.jpg
        nbr = int(os.path.split(image_path)[1].split(".")[1])
        gender_current = os.path.split(image_path)[1].split(".")[2]

        # Detect the face in the image
        faces = faceCascade.detectMultiScale(image)
        # If a face is detected, append the face to images and the label to labels
        try:
            for (x, y, w, h) in faces:
                ref_image = image[y: y + h, x: x + w]
                # Whole-image variant kept for reference: the "whole image" run
                # further below appends `resized` instead of `resized_face`.
                resized = cv2.resize(image, (100, 100), interpolation=cv2.INTER_AREA)
                resized_face = cv2.resize(ref_image, (100, 100), interpolation=cv2.INTER_AREA)

                images.append(np.array(resized_face))
                labels.append(nbr)

                if gender_current == 'Male':
                    gender.append(1)
                else:
                    gender.append(0)

                # Save the cropped face and show it while building the training set
                face_file_name = "face/" + str(os.path.split(image_path)[1])
                cv2.imwrite(face_file_name, resized_face)

                cv2.imshow("Adding faces to training set...", resized_face)
                cv2.waitKey(1)
        except cv2.error:
            pass

    # Return the images, labels, and gender lists
    return images, labels, gender

In [5]:
images, labels, gender = get_images_and_labels('Label_Images_Gender')

cv2.destroyAllWindows()

In [6]:
with open("tmp_gender_tensorflow.pickle", "wb") as f:
    pickle.dump((images,labels,gender), f)

In [ ]:
with open("tmp_gender_tensorflow.pickle", "rb") as f:
    images, labels, gender = pickle.load(f)

In [7]:
# Collapse the per-face gender list into a numpy label array (0/1).
res_gender = np.array(gender)

In [8]:
res_gender.shape


Out[8]:
(2980,)

Basic model parameters as external flags.


In [9]:
# Basic model parameters as external flags.
# (Defined for reference only; the layers below are sized via n_nodes and the
#  optimizer uses a hard-coded Adam learning rate of 1e-4.)
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
flags.DEFINE_integer('max_steps', 2000, 'Number of steps to run trainer.')
flags.DEFINE_integer('hidden1', 1500, 'Number of units in hidden layer 1.')
flags.DEFINE_integer('hidden2', 1000, 'Number of units in hidden layer 2.')
flags.DEFINE_integer('hidden3', 500, 'Number of units in hidden layer 3.')
flags.DEFINE_integer('batch_size', 100, 'Batch size.  '
                     'Must divide evenly into the dataset sizes.')
flags.DEFINE_string('train_dir', 'data', 'Directory to put the training data.')
flags.DEFINE_boolean('fake_data', False, 'If true, uses fake data '
                     'for unit testing.')


NUM_CLASSES = 2
IMAGE_SIZE = 100
#CHANNELS = 3
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE

In [10]:
n_nodes = [IMAGE_PIXELS, 1500, 1000, 500, NUM_CLASSES]
n_epochs = 10

In [11]:
def neural_network_model(data):
    n_hidden_layers = 3
    # define the layers
    layers = [] 
    for i in range(n_hidden_layers + 1):
        layers.append( {'weights':tf.Variable(tf.random_normal([n_nodes[i], n_nodes[i+1]])), 
                        'biases':tf.Variable(tf.random_normal([n_nodes[i+1]]))} )
    
    # calculate the nodal values for each layer
    calcs = [data]
    for i in range(n_hidden_layers):
        calcs.append( tf.nn.relu(tf.matmul(calcs[i], layers[i]['weights']) + layers[i]['biases']) )

    #  return the last layer of nodes
    return tf.matmul(calcs[-1], layers[-1]['weights']) + layers[-1]['biases']
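
As a quick check of the model function (assuming the same TensorFlow-era API used throughout this notebook), the graph can be built on a throwaway placeholder to confirm the logits come out with shape [None, NUM_CLASSES]:

In [ ]:
# Sketch: build the model on a temporary graph and inspect the logits shape.
with tf.Graph().as_default():
    dummy_images = tf.placeholder(tf.float32, [None, IMAGE_PIXELS])
    dummy_logits = neural_network_model(dummy_images)
    print(dummy_logits.get_shape())   # expected: (?, 2)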

In [12]:
def evaluation(logits, labels):
    # Note: tf.nn.in_top_k expects integer class labels, not the one-hot
    # vectors produced by dense_to_one_hot below, so this helper is not
    # used by the training loop in main().
    correct = tf.nn.in_top_k(logits, labels, 1)

    return tf.reduce_sum(tf.cast(correct, tf.int32))

In [13]:
def placeholder_inputs():
    images_placeholder = tf.placeholder(tf.float32, [None,IMAGE_PIXELS])
    labels_placeholder = tf.placeholder(tf.float32, [None,NUM_CLASSES])
    
    return images_placeholder, labels_placeholder


def fill_feed_dict(images_feed,labels_feed, images_pl, labels_pl):
    feed_dict = {
      images_pl: images_feed,
      labels_pl: labels_feed,
    }
  
    return feed_dict

In [14]:
def dense_to_one_hot(labels_dense, num_classes):
    """Convert class labels from scalars to one-hot vectors."""
    num_labels = labels_dense.shape[0]
    index_offset = np.arange(num_labels) * num_classes
    labels_one_hot = np.zeros((num_labels, num_classes))
    labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
    
    return labels_one_hot
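
A tiny worked example of dense_to_one_hot (toy labels chosen only for illustration):

In [ ]:
# Sketch: one-hot encode a toy label vector with two classes.
print(dense_to_one_hot(np.array([0, 1, 1, 0]), 2))
# Expected (formatting may vary):
# [[ 1.  0.]
#  [ 0.  1.]
#  [ 0.  1.]
#  [ 1.  0.]]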

In [15]:
def main():
    # Tell TensorFlow that the model will be built into the default Graph.
    with tf.Graph().as_default():
        # Generate placeholders for the images and labels.
        images_placeholder, labels_placeholder = placeholder_inputs()

        logits = neural_network_model(images_placeholder)

        # Softmax cross-entropy against the one-hot gender labels.
        # (Newer TensorFlow requires the keyword form: labels=..., logits=...)
        cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, labels_placeholder))
        optimizer = tf.train.AdamOptimizer(1e-4).minimize(cost)

        # Accuracy ops are built once here so the graph does not grow
        # with every training step.
        correct = tf.equal(tf.argmax(logits, 1), tf.argmax(labels_placeholder, 1))
        accuracy = tf.reduce_mean(tf.cast(correct, 'float'))

        training_acc = []
        testing_acc = []

        with tf.Session() as sess:
            sess.run(tf.initialize_all_variables())

            # After everything is built, start the training loop.
            subset_size = 256
            for step in xrange(200):
                start_time = time.time()
                total_loss = 0
                # Run one pass over the training set in mini-batches.
                for i in range(int(train_images.shape[0] / subset_size)):
                    epoch_x = train_images[i * subset_size:(i + 1) * subset_size]
                    epoch_y = train_labels[i * subset_size:(i + 1) * subset_size]

                    feed_dict = fill_feed_dict(epoch_x, epoch_y, images_placeholder, labels_placeholder)
                    _, loss_value = sess.run([optimizer, cost], feed_dict=feed_dict)
                    total_loss += loss_value

                duration = time.time() - start_time

                # Print status to stdout.
                print('Step %d: loss = %.2f (%.3f sec)' % (step, total_loss, duration)),

                current_train_acc = accuracy.eval({images_placeholder: train_images, labels_placeholder: train_labels})
                current_test_acc = accuracy.eval({images_placeholder: test_images, labels_placeholder: test_labels})

                training_acc.append(current_train_acc)
                testing_acc.append(current_test_acc)

                print('Training Accuracy:', current_train_acc),
                print('Testing Accuracy:', current_test_acc)

    return training_acc, testing_acc

In [16]:
# Flatten each 100x100 face into a single IMAGE_PIXELS-long feature vector and
# one-hot encode the gender labels for the training and test sets.

images = np.array(images)
images = images.reshape(images.shape[0], IMAGE_PIXELS)

labels = dense_to_one_hot(res_gender, 2)

In [17]:
train_images = images[:-300]
train_labels = labels[:-300]
test_images = images[-300:]
test_labels = labels[-300:]

In [18]:
train_images.shape


Out[18]:
(2680, 10000)

In [19]:
train_labels.shape


Out[19]:
(2680, 2)

In [84]:
if __name__ == '__main__':
    train_acc_whole_image, test_acc_whole_image = main()


Step 0: loss = 270972768.00 (8.142 sec) ('Training Accuracy:', 0.51455224) ('Testing Accuracy:', 0.52999997)
Step 1: loss = 196890163.00 (7.765 sec) ('Training Accuracy:', 0.58619404) ('Testing Accuracy:', 0.61000001)
Step 2: loss = 152967398.00 (7.769 sec) ('Training Accuracy:', 0.63208956) ('Testing Accuracy:', 0.64999998)
Step 3: loss = 126975156.00 (8.511 sec) ('Training Accuracy:', 0.64664179) ('Testing Accuracy:', 0.63999999)
Step 4: loss = 112515974.00 (8.522 sec) ('Training Accuracy:', 0.65820897) ('Testing Accuracy:', 0.64666665)
Step 5: loss = 101201192.00 (8.489 sec) ('Training Accuracy:', 0.66567165) ('Testing Accuracy:', 0.64666665)
Step 6: loss = 91702610.00 (8.517 sec) ('Training Accuracy:', 0.68507463) ('Testing Accuracy:', 0.62666667)
Step 7: loss = 83602773.00 (8.543 sec) ('Training Accuracy:', 0.69253731) ('Testing Accuracy:', 0.63)
Step 8: loss = 76173784.00 (8.702 sec) ('Training Accuracy:', 0.70559704) ('Testing Accuracy:', 0.63999999)
Step 9: loss = 69487701.00 (8.466 sec) ('Training Accuracy:', 0.72462684) ('Testing Accuracy:', 0.63999999)
Step 10: loss = 63469991.00 (8.686 sec) ('Training Accuracy:', 0.73283583) ('Testing Accuracy:', 0.63)
Step 11: loss = 58576214.50 (8.533 sec) ('Training Accuracy:', 0.74365669) ('Testing Accuracy:', 0.63666666)
Step 12: loss = 53949497.50 (8.512 sec) ('Training Accuracy:', 0.74850744) ('Testing Accuracy:', 0.63333333)
Step 13: loss = 49980812.25 (8.524 sec) ('Training Accuracy:', 0.75895524) ('Testing Accuracy:', 0.63333333)
Step 14: loss = 46662470.50 (8.492 sec) ('Training Accuracy:', 0.7779851) ('Testing Accuracy:', 0.64666665)
Step 15: loss = 44073816.00 (9.408 sec) ('Training Accuracy:', 0.76940298) ('Testing Accuracy:', 0.62666667)
Step 16: loss = 40038448.50 (8.512 sec) ('Training Accuracy:', 0.78917909) ('Testing Accuracy:', 0.64666665)
Step 17: loss = 37668004.75 (9.556 sec) ('Training Accuracy:', 0.78955221) ('Testing Accuracy:', 0.63999999)
Step 18: loss = 34599935.75 (8.835 sec) ('Training Accuracy:', 0.7977612) ('Testing Accuracy:', 0.64666665)
Step 19: loss = 32602350.50 (8.756 sec) ('Training Accuracy:', 0.79962689) ('Testing Accuracy:', 0.63999999)
Step 20: loss = 30530272.25 (8.686 sec) ('Training Accuracy:', 0.79701495) ('Testing Accuracy:', 0.63999999)
Step 21: loss = 28581496.25 (8.526 sec) ('Training Accuracy:', 0.80783582) ('Testing Accuracy:', 0.63999999)
Step 22: loss = 27463539.38 (8.532 sec) ('Training Accuracy:', 0.81417912) ('Testing Accuracy:', 0.63999999)
Step 23: loss = 27273009.62 (9.311 sec) ('Training Accuracy:', 0.81791043) ('Testing Accuracy:', 0.63999999)
Step 24: loss = 24722166.75 (8.751 sec) ('Training Accuracy:', 0.81044775) ('Testing Accuracy:', 0.62666667)
Step 25: loss = 22893935.12 (9.631 sec) ('Training Accuracy:', 0.82910448) ('Testing Accuracy:', 0.63999999)
Step 26: loss = 21461402.50 (8.480 sec) ('Training Accuracy:', 0.83395523) ('Testing Accuracy:', 0.63999999)
Step 27: loss = 20451092.38 (8.764 sec) ('Training Accuracy:', 0.81604475) ('Testing Accuracy:', 0.62333333)
Step 28: loss = 18780996.62 (8.503 sec) ('Training Accuracy:', 0.82761192) ('Testing Accuracy:', 0.63999999)
Step 29: loss = 18083719.38 (8.417 sec) ('Training Accuracy:', 0.83805972) ('Testing Accuracy:', 0.64333332)
Step 30: loss = 17656573.50 (9.638 sec) ('Training Accuracy:', 0.82761192) ('Testing Accuracy:', 0.62)
Step 31: loss = 19645679.12 (10.396 sec) ('Training Accuracy:', 0.8455224) ('Testing Accuracy:', 0.64666665)
Step 32: loss = 18060747.00 (8.741 sec) ('Training Accuracy:', 0.84738809) ('Testing Accuracy:', 0.64666665)
Step 33: loss = 16287630.62 (9.660 sec) ('Training Accuracy:', 0.85037315) ('Testing Accuracy:', 0.64333332)
Step 34: loss = 16022550.38 (8.590 sec) ('Training Accuracy:', 0.85223883) ('Testing Accuracy:', 0.65333331)
Step 35: loss = 16126444.75 (8.497 sec) ('Training Accuracy:', 0.84477609) ('Testing Accuracy:', 0.6566667)
Step 36: loss = 20160074.50 (9.361 sec) ('Training Accuracy:', 0.85261196) ('Testing Accuracy:', 0.66000003)
Step 37: loss = 19058979.19 (12.278 sec) ('Training Accuracy:', 0.83619404) ('Testing Accuracy:', 0.66000003)
Step 38: loss = 25349797.44 (10.372 sec) ('Training Accuracy:', 0.79440296) ('Testing Accuracy:', 0.66000003)
Step 39: loss = 38846857.50 (13.289 sec) ('Training Accuracy:', 0.68656719) ('Testing Accuracy:', 0.5933333)
Step 40: loss = 62103928.88 (10.789 sec) ('Training Accuracy:', 0.72462684) ('Testing Accuracy:', 0.64666665)
Step 41: loss = 71226229.75 (10.653 sec) ('Training Accuracy:', 0.85932833) ('Testing Accuracy:', 0.64999998)
Step 42: loss = 32997437.50 (11.605 sec) ('Training Accuracy:', 0.84776121) ('Testing Accuracy:', 0.67000002)
Step 43: loss = 17865575.44 (11.444 sec) ('Training Accuracy:', 0.87985075) ('Testing Accuracy:', 0.64666665)
Step 44: loss = 10992759.50 (12.746 sec) ('Training Accuracy:', 0.91194028) ('Testing Accuracy:', 0.64999998)
Step 45: loss = 9481201.88 (13.132 sec) ('Training Accuracy:', 0.88171643) ('Testing Accuracy:', 0.6566667)
Step 46: loss = 11327278.38 (12.454 sec) ('Training Accuracy:', 0.85447758) ('Testing Accuracy:', 0.66666669)
Step 47: loss = 13075309.38 (11.753 sec) ('Training Accuracy:', 0.83432835) ('Testing Accuracy:', 0.68666667)
Step 48: loss = 14929054.31 (9.016 sec) ('Training Accuracy:', 0.85522389) ('Testing Accuracy:', 0.67666668)
Step 49: loss = 15793690.38 (8.591 sec) ('Training Accuracy:', 0.91305971) ('Testing Accuracy:', 0.66333336)
Step 50: loss = 14468444.62 (8.599 sec) ('Training Accuracy:', 0.91679102) ('Testing Accuracy:', 0.65333331)
Step 51: loss = 14563142.75 (10.491 sec) ('Training Accuracy:', 0.84664178) ('Testing Accuracy:', 0.61666667)
Step 52: loss = 14506599.44 (9.252 sec) ('Training Accuracy:', 0.77873135) ('Testing Accuracy:', 0.60666668)
Step 53: loss = 13960424.81 (9.345 sec) ('Training Accuracy:', 0.85820895) ('Testing Accuracy:', 0.60666668)
Step 54: loss = 9370278.97 (9.544 sec) ('Training Accuracy:', 0.93320894) ('Testing Accuracy:', 0.63999999)
Step 55: loss = 5877129.66 (9.007 sec) ('Training Accuracy:', 0.93358207) ('Testing Accuracy:', 0.65333331)
Step 56: loss = 5098471.66 (8.840 sec) ('Training Accuracy:', 0.94776118) ('Testing Accuracy:', 0.65333331)
Step 57: loss = 4095753.14 (8.889 sec) ('Training Accuracy:', 0.94141793) ('Testing Accuracy:', 0.63333333)
Step 58: loss = 3414996.81 (8.752 sec) ('Training Accuracy:', 0.94402987) ('Testing Accuracy:', 0.63999999)
Step 59: loss = 2922241.89 (8.809 sec) ('Training Accuracy:', 0.95261192) ('Testing Accuracy:', 0.63666666)
Step 60: loss = 2593172.27 (9.181 sec) ('Training Accuracy:', 0.94552237) ('Testing Accuracy:', 0.64333332)
Step 61: loss = 2488027.86 (8.948 sec) ('Training Accuracy:', 0.9507463) ('Testing Accuracy:', 0.64333332)
Step 62: loss = 2245486.94 (8.957 sec) ('Training Accuracy:', 0.93955225) ('Testing Accuracy:', 0.63999999)
Step 63: loss = 2528921.20 (8.824 sec) ('Training Accuracy:', 0.93432838) ('Testing Accuracy:', 0.64666665)
Step 64: loss = 2537535.28 (9.151 sec) ('Training Accuracy:', 0.92611939) ('Testing Accuracy:', 0.64666665)
Step 65: loss = 3005881.19 (9.124 sec) ('Training Accuracy:', 0.94514924) ('Testing Accuracy:', 0.64333332)
Step 66: loss = 3324701.26 (9.476 sec) ('Training Accuracy:', 0.94999999) ('Testing Accuracy:', 0.63999999)
Step 67: loss = 3793705.20 (9.519 sec) ('Training Accuracy:', 0.95261192) ('Testing Accuracy:', 0.63)
Step 68: loss = 4387789.72 (9.277 sec) ('Training Accuracy:', 0.86231345) ('Testing Accuracy:', 0.62)
Step 69: loss = 6043101.11 (10.178 sec) ('Training Accuracy:', 0.79179102) ('Testing Accuracy:', 0.61666667)
Step 70: loss = 6922691.58 (10.144 sec) ('Training Accuracy:', 0.80149251) ('Testing Accuracy:', 0.62333333)
Step 71: loss = 7570687.47 (8.463 sec) ('Training Accuracy:', 0.93283582) ('Testing Accuracy:', 0.62666667)
Step 72: loss = 6708722.72 (9.437 sec) ('Training Accuracy:', 0.93358207) ('Testing Accuracy:', 0.63999999)
Step 73: loss = 6487390.58 (10.786 sec) ('Training Accuracy:', 0.8511194) ('Testing Accuracy:', 0.68666667)
Step 74: loss = 7916039.42 (10.281 sec) ('Training Accuracy:', 0.81977612) ('Testing Accuracy:', 0.68333334)
Step 75: loss = 8200313.57 (10.052 sec) ('Training Accuracy:', 0.86865669) ('Testing Accuracy:', 0.68000001)
Step 76: loss = 7284946.14 (8.827 sec) ('Training Accuracy:', 0.9402985) ('Testing Accuracy:', 0.67333335)
Step 77: loss = 6235194.27 (8.591 sec) ('Training Accuracy:', 0.95634329) ('Testing Accuracy:', 0.63333333)
Step 78: loss = 3710916.05 (12.107 sec) ('Training Accuracy:', 0.86343282) ('Testing Accuracy:', 0.62333333)
Step 79: loss = 3525167.68 (10.512 sec) ('Training Accuracy:', 0.96753728) ('Testing Accuracy:', 0.63333333)
Step 80: loss = 2005166.45 (9.940 sec) ('Training Accuracy:', 0.96305972) ('Testing Accuracy:', 0.62666667)
Step 81: loss = 1157530.96 (8.881 sec) ('Training Accuracy:', 0.96679103) ('Testing Accuracy:', 0.63333333)
Step 82: loss = 899801.12 (8.404 sec) ('Training Accuracy:', 0.97350746) ('Testing Accuracy:', 0.62333333)
Step 83: loss = 732586.93 (9.337 sec) ('Training Accuracy:', 0.97723883) ('Testing Accuracy:', 0.63333333)
Step 84: loss = 543998.98 (8.628 sec) ('Training Accuracy:', 0.9761194) ('Testing Accuracy:', 0.62666667)
Step 85: loss = 446674.48 (8.462 sec) ('Training Accuracy:', 0.97947758) ('Testing Accuracy:', 0.62666667)
Step 86: loss = 378534.73 (9.000 sec) ('Training Accuracy:', 0.97723883) ('Testing Accuracy:', 0.62333333)
Step 87: loss = 331424.46 (9.120 sec) ('Training Accuracy:', 0.97873133) ('Testing Accuracy:', 0.62666667)
Step 88: loss = 305232.42 (9.335 sec) ('Training Accuracy:', 0.97686565) ('Testing Accuracy:', 0.63666666)
Step 89: loss = 270454.88 (9.512 sec) ('Training Accuracy:', 0.97873133) ('Testing Accuracy:', 0.61666667)
Step 90: loss = 196186.20 (8.726 sec) ('Training Accuracy:', 0.97835821) ('Testing Accuracy:', 0.61333334)
Step 91: loss = 171031.62 (9.288 sec) ('Training Accuracy:', 0.98022389) ('Testing Accuracy:', 0.63)
Step 92: loss = 150501.05 (10.730 sec) ('Training Accuracy:', 0.97910446) ('Testing Accuracy:', 0.63666666)
Step 93: loss = 147522.41 (11.427 sec) ('Training Accuracy:', 0.97985077) ('Testing Accuracy:', 0.62666667)
Step 94: loss = 92746.08 (10.835 sec) ('Training Accuracy:', 0.98022389) ('Testing Accuracy:', 0.62666667)
Step 95: loss = 74336.08 (9.172 sec) ('Training Accuracy:', 0.97910446) ('Testing Accuracy:', 0.63666666)
Step 96: loss = 69169.20 (9.853 sec) ('Training Accuracy:', 0.98097014) ('Testing Accuracy:', 0.62666667)
Step 97: loss = 51678.05 (9.129 sec) ('Training Accuracy:', 0.9824627) ('Testing Accuracy:', 0.62666667)
Step 98: loss = 44157.47 (8.918 sec) ('Training Accuracy:', 0.98171639) ('Testing Accuracy:', 0.62666667)
Step 99: loss = 34482.00 (8.498 sec) ('Training Accuracy:', 0.98320895) ('Testing Accuracy:', 0.63)
Step 100: loss = 28598.16 (8.877 sec) ('Training Accuracy:', 0.98283583) ('Testing Accuracy:', 0.63)
Step 101: loss = 23071.05 (10.528 sec) ('Training Accuracy:', 0.98320895) ('Testing Accuracy:', 0.63)
Step 102: loss = 17369.70 (8.717 sec) ('Training Accuracy:', 0.98171639) ('Testing Accuracy:', 0.63333333)
Step 103: loss = 17078.60 (9.997 sec) ('Training Accuracy:', 0.98171639) ('Testing Accuracy:', 0.62666667)
Step 104: loss = 10533.69 (8.572 sec) ('Training Accuracy:', 0.98283583) ('Testing Accuracy:', 0.62666667)
Step 105: loss = 7965.75 (8.667 sec) ('Training Accuracy:', 0.98320895) ('Testing Accuracy:', 0.62333333)
Step 106: loss = 683.11 (8.775 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 107: loss = 0.00 (8.675 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 108: loss = 0.00 (9.004 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 109: loss = 0.00 (8.633 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 110: loss = 0.00 (8.881 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 111: loss = 0.00 (8.996 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 112: loss = 0.00 (8.586 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 113: loss = 0.00 (8.473 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 114: loss = 0.00 (8.975 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 115: loss = 0.00 (9.385 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 116: loss = 0.00 (10.086 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 117: loss = 0.00 (9.413 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 118: loss = 0.00 (8.684 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 119: loss = 0.00 (8.792 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 120: loss = 0.00 (9.496 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 121: loss = 0.00 (8.481 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 122: loss = 0.00 (9.860 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 123: loss = 0.00 (9.045 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 124: loss = 0.00 (8.388 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 125: loss = 0.00 (8.951 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 126: loss = 0.00 (10.367 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 127: loss = 0.00 (9.686 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 128: loss = 0.00 (9.412 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 129: loss = 0.00 (8.630 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 130: loss = 0.00 (8.506 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 131: loss = 0.00 (8.710 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 132: loss = 0.00 (8.626 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 133: loss = 0.00 (8.971 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 134: loss = 0.00 (8.490 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 135: loss = 0.00 (9.644 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 136: loss = 0.00 (9.908 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 137: loss = 0.00 (8.722 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 138: loss = 0.00 (8.573 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 139: loss = 0.00 (8.946 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 140: loss = 0.00 (9.788 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 141: loss = 0.00 (8.585 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 142: loss = 0.00 (8.421 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 143: loss = 0.00 (9.112 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 144: loss = 0.00 (8.450 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 145: loss = 0.00 (8.882 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 146: loss = 0.00 (9.756 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 147: loss = 0.00 (8.384 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 148: loss = 0.00 (8.485 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 149: loss = 0.00 (8.470 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 150: loss = 0.00 (8.585 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 151: loss = 0.00 (10.321 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 152: loss = 0.00 (9.357 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 153: loss = 0.00 (8.910 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 154: loss = 0.00 (8.835 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 155: loss = 0.00 (8.907 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 156: loss = 0.00 (9.406 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 157: loss = 0.00 (8.707 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 158: loss = 0.00 (8.699 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 159: loss = 0.00 (8.725 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 160: loss = 0.00 (8.898 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 161: loss = 0.00 (9.041 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 162: loss = 0.00 (8.986 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 163: loss = 0.00 (8.934 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 164: loss = 0.00 (8.826 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 165: loss = 0.00 (8.890 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 166: loss = 0.00 (9.051 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 167: loss = 0.00 (8.930 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 168: loss = 0.00 (8.979 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 169: loss = 0.00 (11.093 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 170: loss = 0.00 (9.486 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 171: loss = 0.00 (10.127 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 172: loss = 0.00 (9.038 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 173: loss = 0.00 (9.051 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 174: loss = 0.00 (9.337 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 175: loss = 0.00 (9.082 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 176: loss = 0.00 (9.197 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 177: loss = 0.00 (9.458 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 178: loss = 0.00 (9.415 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 179: loss = 0.00 (9.286 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 180: loss = 0.00 (9.569 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 181: loss = 0.00 (9.544 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 182: loss = 0.00 (10.457 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 183: loss = 0.00 (9.471 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 184: loss = 0.00 (9.539 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 185: loss = 0.00 (9.752 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 186: loss = 0.00 (10.173 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 187: loss = 0.00 (10.301 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 188: loss = 0.00 (10.003 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 189: loss = 0.00 (9.628 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 190: loss = 0.00 (10.508 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 191: loss = 0.00 (9.780 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 192: loss = 0.00 (9.847 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 193: loss = 0.00 (9.617 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 194: loss = 0.00 (9.832 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 195: loss = 0.00 (9.832 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 196: loss = 0.00 (9.882 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 197: loss = 0.00 (9.785 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 198: loss = 0.00 (9.485 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)
Step 199: loss = 0.00 (9.610 sec) ('Training Accuracy:', 0.98208958) ('Testing Accuracy:', 0.62333333)

In [20]:
if __name__ == '__main__':
    train_acc_face, test_acc_face = main()


Step 0: loss = 530957034.00 (7.833 sec) ('Training Accuracy:', 0.55447763) ('Testing Accuracy:', 0.53666669)
Step 1: loss = 225841179.00 (7.505 sec) ('Training Accuracy:', 0.54626864) ('Testing Accuracy:', 0.55333334)
Step 2: loss = 141243949.00 (7.498 sec) ('Training Accuracy:', 0.65895522) ('Testing Accuracy:', 0.66333336)
Step 3: loss = 114297752.00 (7.344 sec) ('Training Accuracy:', 0.64701492) ('Testing Accuracy:', 0.63)
Step 4: loss = 87723113.00 (7.997 sec) ('Training Accuracy:', 0.67611939) ('Testing Accuracy:', 0.67666668)
Step 5: loss = 76319744.50 (8.014 sec) ('Training Accuracy:', 0.67947763) ('Testing Accuracy:', 0.67666668)
Step 6: loss = 69800254.50 (7.998 sec) ('Training Accuracy:', 0.68731344) ('Testing Accuracy:', 0.69999999)
Step 7: loss = 63875468.50 (8.019 sec) ('Training Accuracy:', 0.69999999) ('Testing Accuracy:', 0.70333332)
Step 8: loss = 58704504.75 (8.031 sec) ('Training Accuracy:', 0.70373136) ('Testing Accuracy:', 0.70333332)
Step 9: loss = 54707788.50 (8.138 sec) ('Training Accuracy:', 0.71194029) ('Testing Accuracy:', 0.71333331)
Step 10: loss = 51497053.75 (8.178 sec) ('Training Accuracy:', 0.70932835) ('Testing Accuracy:', 0.69666666)
Step 11: loss = 48950831.00 (8.076 sec) ('Training Accuracy:', 0.71119404) ('Testing Accuracy:', 0.69666666)
Step 12: loss = 46228567.50 (8.104 sec) ('Training Accuracy:', 0.71604478) ('Testing Accuracy:', 0.70333332)
Step 13: loss = 43910161.50 (8.053 sec) ('Training Accuracy:', 0.71791047) ('Testing Accuracy:', 0.69666666)
Step 14: loss = 41871933.25 (8.050 sec) ('Training Accuracy:', 0.71940297) ('Testing Accuracy:', 0.69333333)
Step 15: loss = 39719442.50 (8.040 sec) ('Training Accuracy:', 0.72723883) ('Testing Accuracy:', 0.69666666)
Step 16: loss = 37532508.50 (8.063 sec) ('Training Accuracy:', 0.73843282) ('Testing Accuracy:', 0.70333332)
Step 17: loss = 35679525.50 (8.049 sec) ('Training Accuracy:', 0.75335819) ('Testing Accuracy:', 0.72000003)
Step 18: loss = 33630079.25 (8.055 sec) ('Training Accuracy:', 0.76007462) ('Testing Accuracy:', 0.72000003)
Step 19: loss = 31934225.12 (8.065 sec) ('Training Accuracy:', 0.7738806) ('Testing Accuracy:', 0.73000002)
Step 20: loss = 30821638.00 (8.068 sec) ('Training Accuracy:', 0.77201492) ('Testing Accuracy:', 0.72333336)
Step 21: loss = 28850039.62 (8.071 sec) ('Training Accuracy:', 0.78246272) ('Testing Accuracy:', 0.72333336)
Step 22: loss = 27892153.62 (8.105 sec) ('Training Accuracy:', 0.78955221) ('Testing Accuracy:', 0.73000002)
Step 23: loss = 26137488.00 (8.123 sec) ('Training Accuracy:', 0.79067165) ('Testing Accuracy:', 0.72000003)
Step 24: loss = 24821207.38 (8.095 sec) ('Training Accuracy:', 0.7992537) ('Testing Accuracy:', 0.73000002)
Step 25: loss = 23506273.50 (8.070 sec) ('Training Accuracy:', 0.80522388) ('Testing Accuracy:', 0.73000002)
Step 26: loss = 22358256.25 (8.086 sec) ('Training Accuracy:', 0.80559701) ('Testing Accuracy:', 0.72666669)
Step 27: loss = 21222296.25 (8.092 sec) ('Training Accuracy:', 0.81044775) ('Testing Accuracy:', 0.72333336)
Step 28: loss = 20194933.00 (8.082 sec) ('Training Accuracy:', 0.81268656) ('Testing Accuracy:', 0.72000003)
Step 29: loss = 19072746.75 (8.087 sec) ('Training Accuracy:', 0.81865674) ('Testing Accuracy:', 0.72666669)
Step 30: loss = 18111730.00 (8.160 sec) ('Training Accuracy:', 0.81828356) ('Testing Accuracy:', 0.72666669)
Step 31: loss = 17515122.12 (8.078 sec) ('Training Accuracy:', 0.81828356) ('Testing Accuracy:', 0.72333336)
Step 32: loss = 17382956.38 (9.505 sec) ('Training Accuracy:', 0.82649255) ('Testing Accuracy:', 0.72000003)
Step 33: loss = 16107770.50 (11.374 sec) ('Training Accuracy:', 0.82499999) ('Testing Accuracy:', 0.71666664)
Step 34: loss = 15324089.75 (9.493 sec) ('Training Accuracy:', 0.82611942) ('Testing Accuracy:', 0.72000003)
Step 35: loss = 14771474.00 (10.103 sec) ('Training Accuracy:', 0.83320898) ('Testing Accuracy:', 0.72000003)
Step 36: loss = 14530014.12 (7.395 sec) ('Training Accuracy:', 0.83619404) ('Testing Accuracy:', 0.73000002)
Step 37: loss = 13269117.94 (11.944 sec) ('Training Accuracy:', 0.82686567) ('Testing Accuracy:', 0.72000003)
Step 38: loss = 13180534.62 (9.819 sec) ('Training Accuracy:', 0.83395523) ('Testing Accuracy:', 0.72666669)
Step 39: loss = 13820938.81 (10.696 sec) ('Training Accuracy:', 0.83582091) ('Testing Accuracy:', 0.72666669)
Step 40: loss = 11780236.38 (9.459 sec) ('Training Accuracy:', 0.83208954) ('Testing Accuracy:', 0.72333336)
Step 41: loss = 12258694.12 (9.468 sec) ('Training Accuracy:', 0.84029853) ('Testing Accuracy:', 0.72000003)
Step 42: loss = 11400786.69 (11.445 sec) ('Training Accuracy:', 0.84067166) ('Testing Accuracy:', 0.72000003)
Step 43: loss = 10567732.69 (9.507 sec) ('Training Accuracy:', 0.8335821) ('Testing Accuracy:', 0.73000002)
Step 44: loss = 10495550.00 (9.464 sec) ('Training Accuracy:', 0.83059704) ('Testing Accuracy:', 0.72666669)
Step 45: loss = 10615955.19 (10.908 sec) ('Training Accuracy:', 0.84477609) ('Testing Accuracy:', 0.72000003)
Step 46: loss = 11235297.50 (12.263 sec) ('Training Accuracy:', 0.84813434) ('Testing Accuracy:', 0.72333336)
Step 47: loss = 9594179.31 (11.312 sec) ('Training Accuracy:', 0.82014924) ('Testing Accuracy:', 0.72000003)
Step 48: loss = 9777451.06 (9.111 sec) ('Training Accuracy:', 0.82798505) ('Testing Accuracy:', 0.73666668)
Step 49: loss = 9726813.31 (11.627 sec) ('Training Accuracy:', 0.84738809) ('Testing Accuracy:', 0.73000002)
Step 50: loss = 10557414.00 (12.469 sec) ('Training Accuracy:', 0.81902987) ('Testing Accuracy:', 0.72333336)
Step 51: loss = 9048399.50 (9.908 sec) ('Training Accuracy:', 0.811194) ('Testing Accuracy:', 0.71666664)
Step 52: loss = 11346758.00 (9.491 sec) ('Training Accuracy:', 0.82462686) ('Testing Accuracy:', 0.72000003)
Step 53: loss = 9018292.12 (10.205 sec) ('Training Accuracy:', 0.80261195) ('Testing Accuracy:', 0.70999998)
Step 54: loss = 10026315.72 (10.261 sec) ('Training Accuracy:', 0.813806) ('Testing Accuracy:', 0.70999998)
Step 55: loss = 9643834.41 (11.397 sec) ('Training Accuracy:', 0.81417912) ('Testing Accuracy:', 0.72000003)
Step 56: loss = 9429158.97 (10.163 sec) ('Training Accuracy:', 0.80634326) ('Testing Accuracy:', 0.70666665)
Step 57: loss = 9578189.75 (9.473 sec) ('Training Accuracy:', 0.79850745) ('Testing Accuracy:', 0.70333332)
Step 58: loss = 9679513.88 (11.272 sec) ('Training Accuracy:', 0.76865673) ('Testing Accuracy:', 0.70666665)
Step 59: loss = 14001550.81 (9.705 sec) ('Training Accuracy:', 0.76716417) ('Testing Accuracy:', 0.71333331)
Step 60: loss = 15879888.50 (9.479 sec) ('Training Accuracy:', 0.72537315) ('Testing Accuracy:', 0.68666667)
Step 61: loss = 21243296.62 (10.518 sec) ('Training Accuracy:', 0.73731345) ('Testing Accuracy:', 0.70999998)
Step 62: loss = 30951840.88 (10.074 sec) ('Training Accuracy:', 0.81828356) ('Testing Accuracy:', 0.72333336)
Step 63: loss = 35931541.25 (9.457 sec) ('Training Accuracy:', 0.86119401) ('Testing Accuracy:', 0.75666666)
Step 64: loss = 33500111.12 (10.084 sec) ('Training Accuracy:', 0.77686566) ('Testing Accuracy:', 0.67000002)
Step 65: loss = 27796859.62 (10.441 sec) ('Training Accuracy:', 0.75373137) ('Testing Accuracy:', 0.66000003)
Step 66: loss = 22847664.75 (9.483 sec) ('Training Accuracy:', 0.78694028) ('Testing Accuracy:', 0.67666668)
Step 67: loss = 16038778.69 (9.637 sec) ('Training Accuracy:', 0.83619404) ('Testing Accuracy:', 0.69)
Step 68: loss = 10116828.22 (11.151 sec) ('Training Accuracy:', 0.87723881) ('Testing Accuracy:', 0.72000003)
Step 69: loss = 7241437.59 (9.446 sec) ('Training Accuracy:', 0.90634328) ('Testing Accuracy:', 0.76666665)
Step 70: loss = 6505740.75 (9.424 sec) ('Training Accuracy:', 0.90634328) ('Testing Accuracy:', 0.75999999)
Step 71: loss = 6483261.53 (10.978 sec) ('Training Accuracy:', 0.90970147) ('Testing Accuracy:', 0.74333334)
Step 72: loss = 6018772.31 (9.497 sec) ('Training Accuracy:', 0.91044778) ('Testing Accuracy:', 0.74333334)
Step 73: loss = 6193634.62 (9.466 sec) ('Training Accuracy:', 0.91119403) ('Testing Accuracy:', 0.73666668)
Step 74: loss = 6104793.50 (10.939 sec) ('Training Accuracy:', 0.90932834) ('Testing Accuracy:', 0.74000001)
Step 75: loss = 6202914.06 (11.116 sec) ('Training Accuracy:', 0.90858209) ('Testing Accuracy:', 0.74333334)
Step 76: loss = 6573527.06 (9.519 sec) ('Training Accuracy:', 0.90895522) ('Testing Accuracy:', 0.73333335)
Step 77: loss = 6583490.97 (9.456 sec) ('Training Accuracy:', 0.90559703) ('Testing Accuracy:', 0.74333334)
Step 78: loss = 7907350.06 (11.001 sec) ('Training Accuracy:', 0.90708953) ('Testing Accuracy:', 0.73000002)
Step 79: loss = 9191025.69 (9.781 sec) ('Training Accuracy:', 0.9029851) ('Testing Accuracy:', 0.73333335)
Step 80: loss = 10519257.75 (9.640 sec) ('Training Accuracy:', 0.87425375) ('Testing Accuracy:', 0.70666665)
Step 81: loss = 13035793.25 (10.198 sec) ('Training Accuracy:', 0.80149251) ('Testing Accuracy:', 0.67333335)
Step 82: loss = 16948571.34 (10.678 sec) ('Training Accuracy:', 0.73134327) ('Testing Accuracy:', 0.63999999)
Step 83: loss = 21252655.38 (9.509 sec) ('Training Accuracy:', 0.73507464) ('Testing Accuracy:', 0.64666665)
Step 84: loss = 21086679.03 (9.532 sec) ('Training Accuracy:', 0.82276118) ('Testing Accuracy:', 0.68000001)
Step 85: loss = 15766022.00 (11.497 sec) ('Training Accuracy:', 0.89999998) ('Testing Accuracy:', 0.74000001)
Step 86: loss = 8716707.38 (9.741 sec) ('Training Accuracy:', 0.91940296) ('Testing Accuracy:', 0.75666666)
Step 87: loss = 5637330.25 (9.536 sec) ('Training Accuracy:', 0.91417909) ('Testing Accuracy:', 0.77666664)
Step 88: loss = 4522287.22 (10.374 sec) ('Training Accuracy:', 0.90783584) ('Testing Accuracy:', 0.76333332)
Step 89: loss = 4148048.78 (10.404 sec) ('Training Accuracy:', 0.90223879) ('Testing Accuracy:', 0.76333332)
Step 90: loss = 3455095.36 (9.519 sec) ('Training Accuracy:', 0.91716421) ('Testing Accuracy:', 0.75666666)
Step 91: loss = 3351460.23 (10.290 sec) ('Training Accuracy:', 0.92500001) ('Testing Accuracy:', 0.75666666)
Step 92: loss = 3398425.62 (11.716 sec) ('Training Accuracy:', 0.93134326) ('Testing Accuracy:', 0.75333333)
Step 93: loss = 4245507.34 (12.057 sec) ('Training Accuracy:', 0.90223879) ('Testing Accuracy:', 0.70999998)
Step 94: loss = 5943325.52 (11.087 sec) ('Training Accuracy:', 0.84664178) ('Testing Accuracy:', 0.68000001)
Step 95: loss = 7670328.03 (10.101 sec) ('Training Accuracy:', 0.7914179) ('Testing Accuracy:', 0.66333336)
Step 96: loss = 9014455.28 (16.474 sec) ('Training Accuracy:', 0.7899254) ('Testing Accuracy:', 0.66666669)
Step 97: loss = 9350608.92 (11.299 sec) ('Training Accuracy:', 0.81641793) ('Testing Accuracy:', 0.68333334)
Step 98: loss = 8009765.59 (9.665 sec) ('Training Accuracy:', 0.87537313) ('Testing Accuracy:', 0.68666667)
Step 99: loss = 6368379.44 (10.088 sec) ('Training Accuracy:', 0.90485072) ('Testing Accuracy:', 0.72666669)
Step 100: loss = 5521285.66 (10.835 sec) ('Training Accuracy:', 0.93395525) ('Testing Accuracy:', 0.74333334)
Step 101: loss = 4834843.95 (12.126 sec) ('Training Accuracy:', 0.92388058) ('Testing Accuracy:', 0.77333331)
Step 102: loss = 4438953.75 (9.820 sec) ('Training Accuracy:', 0.90074629) ('Testing Accuracy:', 0.76666665)
Step 103: loss = 4530911.35 (9.522 sec) ('Training Accuracy:', 0.88656718) ('Testing Accuracy:', 0.75)
Step 104: loss = 4170186.19 (10.315 sec) ('Training Accuracy:', 0.9044776) ('Testing Accuracy:', 0.74666667)
Step 105: loss = 3563975.62 (10.495 sec) ('Training Accuracy:', 0.9242537) ('Testing Accuracy:', 0.75)
Step 106: loss = 3419954.38 (9.680 sec) ('Training Accuracy:', 0.94067162) ('Testing Accuracy:', 0.76333332)
Step 107: loss = 3900172.16 (9.959 sec) ('Training Accuracy:', 0.93171644) ('Testing Accuracy:', 0.74000001)
Step 108: loss = 3755002.17 (10.246 sec) ('Training Accuracy:', 0.90111941) ('Testing Accuracy:', 0.70666665)
Step 109: loss = 4539722.55 (10.634 sec) ('Training Accuracy:', 0.84477609) ('Testing Accuracy:', 0.68000001)
Step 110: loss = 5087151.99 (7.644 sec) ('Training Accuracy:', 0.83320898) ('Testing Accuracy:', 0.67666668)
Step 111: loss = 5247337.45 (11.649 sec) ('Training Accuracy:', 0.84776121) ('Testing Accuracy:', 0.67666668)
Step 112: loss = 4725729.64 (9.578 sec) ('Training Accuracy:', 0.8910448) ('Testing Accuracy:', 0.68333334)
Step 113: loss = 4341503.27 (10.448 sec) ('Training Accuracy:', 0.92649251) ('Testing Accuracy:', 0.72333336)
Step 114: loss = 4276987.09 (10.423 sec) ('Training Accuracy:', 0.94141793) ('Testing Accuracy:', 0.74000001)
Step 115: loss = 4403008.58 (9.541 sec) ('Training Accuracy:', 0.94328356) ('Testing Accuracy:', 0.76666665)
Step 116: loss = 4420080.12 (9.649 sec) ('Training Accuracy:', 0.89962685) ('Testing Accuracy:', 0.75)
Step 117: loss = 4610410.38 (11.191 sec) ('Training Accuracy:', 0.8708955) ('Testing Accuracy:', 0.74666667)
Step 118: loss = 4924084.88 (9.527 sec) ('Training Accuracy:', 0.86231345) ('Testing Accuracy:', 0.73000002)
Step 119: loss = 4566207.55 (9.671 sec) ('Training Accuracy:', 0.90597016) ('Testing Accuracy:', 0.74333334)
Step 120: loss = 4258060.92 (10.625 sec) ('Training Accuracy:', 0.94999999) ('Testing Accuracy:', 0.75333333)
Step 121: loss = 3704155.56 (10.214 sec) ('Training Accuracy:', 0.94328356) ('Testing Accuracy:', 0.74666667)
Step 122: loss = 3004893.87 (9.495 sec) ('Training Accuracy:', 0.92164177) ('Testing Accuracy:', 0.71666664)
Step 123: loss = 3352951.51 (9.790 sec) ('Training Accuracy:', 0.87686568) ('Testing Accuracy:', 0.69333333)
Step 124: loss = 3633737.41 (11.119 sec) ('Training Accuracy:', 0.85559702) ('Testing Accuracy:', 0.68666667)
Step 125: loss = 3695729.81 (9.508 sec) ('Training Accuracy:', 0.90335822) ('Testing Accuracy:', 0.69999999)
Step 126: loss = 3325456.38 (9.861 sec) ('Training Accuracy:', 0.94253731) ('Testing Accuracy:', 0.73000002)
Step 127: loss = 2882269.16 (10.326 sec) ('Training Accuracy:', 0.94776118) ('Testing Accuracy:', 0.74666667)
Step 128: loss = 2545887.39 (11.276 sec) ('Training Accuracy:', 0.93171644) ('Testing Accuracy:', 0.75666666)
Step 129: loss = 2378972.40 (9.500 sec) ('Training Accuracy:', 0.93656719) ('Testing Accuracy:', 0.76333332)
Step 130: loss = 1731009.14 (9.504 sec) ('Training Accuracy:', 0.95335823) ('Testing Accuracy:', 0.75666666)
Step 131: loss = 1734834.80 (11.021 sec) ('Training Accuracy:', 0.95746267) ('Testing Accuracy:', 0.74000001)
Step 132: loss = 1521000.71 (9.877 sec) ('Training Accuracy:', 0.9481343) ('Testing Accuracy:', 0.74000001)
Step 133: loss = 1423962.86 (9.510 sec) ('Training Accuracy:', 0.92089552) ('Testing Accuracy:', 0.72333336)
Step 134: loss = 1504684.48 (8.157 sec) ('Training Accuracy:', 0.93805969) ('Testing Accuracy:', 0.73666668)
Step 135: loss = 1556232.00 (11.938 sec) ('Training Accuracy:', 0.95559704) ('Testing Accuracy:', 0.74666667)
Step 136: loss = 1475386.57 (11.182 sec) ('Training Accuracy:', 0.95597017) ('Testing Accuracy:', 0.74666667)
Step 137: loss = 1274547.94 (10.532 sec) ('Training Accuracy:', 0.94626868) ('Testing Accuracy:', 0.74333334)
Step 138: loss = 1390084.14 (9.626 sec) ('Training Accuracy:', 0.93768656) ('Testing Accuracy:', 0.73666668)
Step 139: loss = 1060685.13 (11.716 sec) ('Training Accuracy:', 0.96156716) ('Testing Accuracy:', 0.75)
Step 140: loss = 830844.73 (12.145 sec) ('Training Accuracy:', 0.9600746) ('Testing Accuracy:', 0.75333333)
Step 141: loss = 763642.54 (9.600 sec) ('Training Accuracy:', 0.96604478) ('Testing Accuracy:', 0.74666667)
Step 142: loss = 631249.12 (9.761 sec) ('Training Accuracy:', 0.96529853) ('Testing Accuracy:', 0.73666668)
Step 143: loss = 873665.30 (10.576 sec) ('Training Accuracy:', 0.95298505) ('Testing Accuracy:', 0.73333335)
Step 144: loss = 996793.10 (10.734 sec) ('Training Accuracy:', 0.93395525) ('Testing Accuracy:', 0.72666669)
Step 145: loss = 1226965.67 (9.585 sec) ('Training Accuracy:', 0.93134326) ('Testing Accuracy:', 0.73333335)
Step 146: loss = 1172432.68 (9.524 sec) ('Training Accuracy:', 0.90746272) ('Testing Accuracy:', 0.71666664)
Step 147: loss = 1409667.04 (11.138 sec) ('Training Accuracy:', 0.89029849) ('Testing Accuracy:', 0.69666666)
Step 148: loss = 1698964.59 (9.942 sec) ('Training Accuracy:', 0.8589552) ('Testing Accuracy:', 0.68333334)
Step 149: loss = 1854918.99 (9.547 sec) ('Training Accuracy:', 0.86044776) ('Testing Accuracy:', 0.68333334)
Step 150: loss = 2247289.27 (10.195 sec) ('Training Accuracy:', 0.85708958) ('Testing Accuracy:', 0.67666668)
Step 151: loss = 2370925.65 (10.831 sec) ('Training Accuracy:', 0.83843285) ('Testing Accuracy:', 0.67000002)
Step 152: loss = 2620404.10 (9.545 sec) ('Training Accuracy:', 0.84440297) ('Testing Accuracy:', 0.67333335)
Step 153: loss = 2655132.12 (9.664 sec) ('Training Accuracy:', 0.88134331) ('Testing Accuracy:', 0.68666667)
Step 154: loss = 3056952.23 (10.938 sec) ('Training Accuracy:', 0.85559702) ('Testing Accuracy:', 0.66333336)
Step 155: loss = 2736799.16 (10.107 sec) ('Training Accuracy:', 0.92126864) ('Testing Accuracy:', 0.69999999)
Step 156: loss = 3775359.71 (9.800 sec) ('Training Accuracy:', 0.88246268) ('Testing Accuracy:', 0.68666667)
Step 157: loss = 3548380.61 (9.904 sec) ('Training Accuracy:', 0.92500001) ('Testing Accuracy:', 0.71333331)
Step 158: loss = 4812390.83 (12.007 sec) ('Training Accuracy:', 0.93022388) ('Testing Accuracy:', 0.73000002)
Step 159: loss = 4701074.40 (9.989 sec) ('Training Accuracy:', 0.94701493) ('Testing Accuracy:', 0.73333335)
Step 160: loss = 6497813.86 (8.829 sec) ('Training Accuracy:', 0.94962686) ('Testing Accuracy:', 0.76666665)
Step 161: loss = 7136075.75 (8.074 sec) ('Training Accuracy:', 0.93059701) ('Testing Accuracy:', 0.79000002)
Step 162: loss = 7803914.17 (7.949 sec) ('Training Accuracy:', 0.8470149) ('Testing Accuracy:', 0.74666667)
Step 163: loss = 9603285.78 (7.930 sec) ('Training Accuracy:', 0.77686566) ('Testing Accuracy:', 0.70999998)
Step 164: loss = 11316093.46 (8.279 sec) ('Training Accuracy:', 0.811194) ('Testing Accuracy:', 0.73000002)
Step 165: loss = 9999308.72 (7.913 sec) ('Training Accuracy:', 0.85858208) ('Testing Accuracy:', 0.74666667)
Step 166: loss = 9705523.67 (8.105 sec) ('Training Accuracy:', 0.92574626) ('Testing Accuracy:', 0.76999998)
Step 167: loss = 9461750.91 (8.181 sec) ('Training Accuracy:', 0.95970148) ('Testing Accuracy:', 0.76999998)
Step 168: loss = 7977673.69 (7.932 sec) ('Training Accuracy:', 0.93694031) ('Testing Accuracy:', 0.75999999)
Step 169: loss = 6291326.77 (7.917 sec) ('Training Accuracy:', 0.86082089) ('Testing Accuracy:', 0.68666667)
Step 170: loss = 6385828.06 (8.145 sec) ('Training Accuracy:', 0.82611942) ('Testing Accuracy:', 0.66333336)
Step 171: loss = 5074269.34 (7.941 sec) ('Training Accuracy:', 0.8630597) ('Testing Accuracy:', 0.67000002)
Step 172: loss = 3494445.31 (7.976 sec) ('Training Accuracy:', 0.95597017) ('Testing Accuracy:', 0.75)
Step 173: loss = 2544812.97 (7.931 sec) ('Training Accuracy:', 0.94067162) ('Testing Accuracy:', 0.76666665)
Step 174: loss = 2128728.66 (7.900 sec) ('Training Accuracy:', 0.94850749) ('Testing Accuracy:', 0.75666666)
Step 175: loss = 1425505.16 (8.569 sec) ('Training Accuracy:', 0.96492535) ('Testing Accuracy:', 0.75)
Step 176: loss = 783611.13 (7.918 sec) ('Training Accuracy:', 0.96753728) ('Testing Accuracy:', 0.76333332)
Step 177: loss = 530685.57 (7.954 sec) ('Training Accuracy:', 0.96194029) ('Testing Accuracy:', 0.75666666)
Step 178: loss = 441537.40 (8.006 sec) ('Training Accuracy:', 0.96305972) ('Testing Accuracy:', 0.75)
Step 179: loss = 432475.66 (8.197 sec) ('Training Accuracy:', 0.96641791) ('Testing Accuracy:', 0.74666667)
Step 180: loss = 478178.28 (8.046 sec) ('Training Accuracy:', 0.9705224) ('Testing Accuracy:', 0.75)
Step 181: loss = 606558.71 (8.138 sec) ('Training Accuracy:', 0.9641791) ('Testing Accuracy:', 0.75333333)
Step 182: loss = 573477.61 (8.931 sec) ('Training Accuracy:', 0.96529853) ('Testing Accuracy:', 0.75)
Step 183: loss = 398613.96 (8.531 sec) ('Training Accuracy:', 0.97276121) ('Testing Accuracy:', 0.74666667)
Step 184: loss = 229598.90 (7.930 sec) ('Training Accuracy:', 0.97500002) ('Testing Accuracy:', 0.75666666)
Step 185: loss = 166174.89 (7.921 sec) ('Training Accuracy:', 0.97537315) ('Testing Accuracy:', 0.75666666)
Step 186: loss = 155779.26 (7.915 sec) ('Training Accuracy:', 0.97388059) ('Testing Accuracy:', 0.74000001)
Step 187: loss = 194581.24 (8.157 sec) ('Training Accuracy:', 0.97462684) ('Testing Accuracy:', 0.73666668)
Step 188: loss = 171248.66 (7.953 sec) ('Training Accuracy:', 0.97425371) ('Testing Accuracy:', 0.75)
Step 189: loss = 174438.43 (7.928 sec) ('Training Accuracy:', 0.97313434) ('Testing Accuracy:', 0.74666667)
Step 190: loss = 173143.45 (7.925 sec) ('Training Accuracy:', 0.97425371) ('Testing Accuracy:', 0.73666668)
Step 191: loss = 198379.16 (7.933 sec) ('Training Accuracy:', 0.97574627) ('Testing Accuracy:', 0.73333335)
Step 192: loss = 283666.66 (8.105 sec) ('Training Accuracy:', 0.97723883) ('Testing Accuracy:', 0.74333334)
Step 193: loss = 377846.05 (7.940 sec) ('Training Accuracy:', 0.97164178) ('Testing Accuracy:', 0.75333333)
Step 194: loss = 466602.04 (8.088 sec) ('Training Accuracy:', 0.96082091) ('Testing Accuracy:', 0.75666666)
Step 195: loss = 351742.43 (7.907 sec) ('Training Accuracy:', 0.9720149) ('Testing Accuracy:', 0.75666666)
Step 196: loss = 220316.57 (8.191 sec) ('Training Accuracy:', 0.97014928) ('Testing Accuracy:', 0.75999999)
Step 197: loss = 223357.74 (8.142 sec) ('Training Accuracy:', 0.97462684) ('Testing Accuracy:', 0.75999999)
Step 198: loss = 306118.28 (8.080 sec) ('Training Accuracy:', 0.97574627) ('Testing Accuracy:', 0.75999999)
Step 199: loss = 322885.64 (8.097 sec) ('Training Accuracy:', 0.97574627) ('Testing Accuracy:', 0.74000001)

In [113]:
# Note: train_acc / test_acc in the next few cells come from an earlier, longer run of main().
max(test_acc)


Out[113]:
0.81

In [109]:
x = np.arange(len(train_acc))

plt.plot(x, train_acc)

plt.legend(['y = training_samples'], loc='lower right')

plt.show()



In [110]:
x = np.arange(len(test_acc))

plt.plot(x, test_acc)

plt.legend(['y = testing_samples'], loc='lower right')

plt.show()



In [116]:
accuracy_list = pd.DataFrame(
    {'Training_Acc': train_acc,
     'Testing_Acc': test_acc,
    })

In [117]:
accuracy_list.plot()


Out[117]:
<matplotlib.axes.AxesSubplot at 0x7f7825f85f90>

In [25]:
accuracy_list_face = pd.DataFrame(
    {'Training_Acc': train_acc_face,
     'Testing_Acc': test_acc_face,
    })
ax = accuracy_list_face.plot(title='Gender Classification using Face')
ax.set_xlabel("Samples")
ax.set_ylabel("Accuracy")


Out[25]:
<matplotlib.text.Text at 0x7f3bd5f2b790>

In [85]:
accuracy_list_whole = pd.DataFrame(
    {'Training_Acc': train_acc_whole_image,
     'Testing_Acc': test_acc_whole_image,
    })

In [86]:
accuracy_list_whole.plot()


Out[86]:
<matplotlib.axes.AxesSubplot at 0x7eff075c6990>
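
For a side-by-side view of the two experiments, the test-accuracy curves can be plotted on one axis from the lists returned by main(); this is a sketch using the variables defined above.

In [ ]:
# Sketch: compare test accuracy of the face-crop and whole-image models.
comparison = pd.DataFrame({
    'Face': test_acc_face,
    'Whole image': test_acc_whole_image,
})
ax = comparison.plot(title='Test accuracy: face crop vs. whole image')
ax.set_xlabel("Training step")
ax.set_ylabel("Accuracy")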