In [2]:
%pylab inline


Populating the interactive namespace from numpy and matplotlib

In [3]:
import sys
from time import time
import matplotlib as pl
import matplotlib.pyplot as plt
import pickle

In [4]:
dataPath = '/Users/omojumiller/mycode/MachineLearningNanoDegree/IntroToMachineLearning/'
sys.path.append(dataPath+'tools/')
sys.path.append(dataPath+'final_project/')

from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data

In [5]:
### Load the dictionary containing the dataset

with open(dataPath+'final_project/final_project_dataset.pkl', "r") as data_file:
    data_dict = pickle.load(data_file)

In [7]:
# Remove the source of the outlier
data_dict.pop( 'TOTAL')


Out[7]:
{'bonus': 97343619,
 'deferral_payments': 32083396,
 'deferred_income': -27992891,
 'director_fees': 1398517,
 'email_address': 'NaN',
 'exercised_stock_options': 311764000,
 'expenses': 5235198,
 'from_messages': 'NaN',
 'from_poi_to_this_person': 'NaN',
 'from_this_person_to_poi': 'NaN',
 'loan_advances': 83925000,
 'long_term_incentive': 48521928,
 'other': 42667589,
 'poi': False,
 'restricted_stock': 130322299,
 'restricted_stock_deferred': -7576788,
 'salary': 26704229,
 'shared_receipt_with_poi': 'NaN',
 'to_messages': 'NaN',
 'total_payments': 309886585,
 'total_stock_value': 434509511}

In [20]:
features_list = [ 'long_term_incentive', 'bonus']
data = featureFormat(data_dict, features_list, remove_any_zeroes=True)

for point in data:
    salary = point[0]
    bonus = point[1]
    plt.scatter( salary, bonus )

#plt.xticks(np.arange(0, 1e6, 200000), rotation = -60)
plt.xlim((0, 1e7))


plt.xlabel("bonus")
plt.ylabel("long_term_incentive")
plt.show()



In [21]:
def doPCA():
    from sklearn.decomposition import PCA
    pca = PCA(n_components=2)
    pca.fit(data)
    return pca

In [22]:
pca = doPCA()

In [23]:
print pca.explained_variance_ratio_


[ 0.90774318  0.09225682]

In [27]:
first_pc = pca.components_[0]
second_pc = pca.components_[1]

print first_pc, second_pc


[ 0.29782915  0.95461919] [ 0.95461919 -0.29782915]

In [25]:
transformed_data = pca.transform(data)

In [30]:
for ii, jj in zip(transformed_data, data):
    plt.scatter( first_pc[0]*ii[0], first_pc[1]*ii[0], color='r' )
    plt.scatter( second_pc[0]*ii[1], second_pc[1]*ii[1], color='c' )
    plt.scatter(jj[0], jj[1], color='b') #original data

#plt.xlim((0, 1e7))
plt.xlabel("bonus")
plt.ylabel("long_term_incentive")
plt.show()


Eigenfaces Mini Project


In [31]:
print __doc__

from time import time
import logging
import pylab as pl
import numpy as np


Automatically created module for IPython interactive environment

In [32]:
from sklearn.cross_validation import train_test_split
from sklearn.datasets import fetch_lfw_people
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.decomposition import RandomizedPCA
from sklearn.svm import SVC

In [33]:
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')

In [34]:
# Download the data, if not already on disk and load it as numpy arrays
lfw_people = fetch_lfw_people(min_faces_per_person=70, resize=0.4)

In [35]:
# introspect the images arrays to find the shapes (for plotting)
n_samples, h, w = lfw_people.images.shape
np.random.seed(42)

In [36]:
# for machine learning we use the data directly (as relative pixel
# position info is ignored by this model)
X = lfw_people.data
n_features = X.shape[1]

# the label to predict is the id of the person
y = lfw_people.target
target_names = lfw_people.target_names
n_classes = target_names.shape[0]

print "Total dataset size:"
print "n_samples: %d" % n_samples
print "n_features: %d" % n_features
print "n_classes: %d" % n_classes


Total dataset size:
n_samples: 1288
n_features: 1850
n_classes: 7

In [37]:
# Split into a training and testing set
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42)

In [48]:
def computeEigenfaces():
    
    print "Extracting the top %d eigenfaces from %d faces" % (n_components, X_train.shape[0])
    t0 = time()
    pca = RandomizedPCA(n_components=n_components, whiten=True).fit(X_train)
    print "done in %0.3fs" % (time() - t0)

    eigenfaces = pca.components_.reshape((n_components, h, w))

    print "Projecting the input data on the eigenfaces orthonormal basis"
    t0 = time()
    X_train_pca = pca.transform(X_train)
    X_test_pca = pca.transform(X_test)
    print "done in %0.3fs" % (time() - t0)
    
    # Train a SVM classification model

    print "Fitting the classifier to the training set"
    t0 = time()
    param_grid = {
          'C': [1e3, 5e3, 1e4, 5e4, 1e5],
          'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1],
          }
    # for sklearn version 0.16 or prior, the class_weight parameter value is 'auto'
    clf = GridSearchCV(SVC(kernel='rbf', class_weight='balanced'), param_grid)
    clf = clf.fit(X_train_pca, y_train)
    print "done in %0.3fs" % (time() - t0)
    print "Best estimator found by grid search:"
    print clf.best_estimator_
    
    
    # Quantitative evaluation of the model quality on the test set

    print "Predicting the people names on the testing set"
    t0 = time()
    y_pred = clf.predict(X_test_pca)
    print "done in %0.3fs" % (time() - t0)

    print classification_report(y_test, y_pred, target_names=target_names)
    print confusion_matrix(y_test, y_pred, labels=range(n_classes))

In [42]:
# Qualitative evaluation of the predictions using matplotlib

def plot_gallery(images, titles, h, w, n_row=3, n_col=4):
    """Helper function to plot a gallery of portraits"""
    pl.figure(figsize=(1.8 * n_col, 2.4 * n_row))
    pl.subplots_adjust(bottom=0, left=.01, right=.99, top=.90, hspace=.35)
    for i in range(n_row * n_col):
        pl.subplot(n_row, n_col, i + 1)
        pl.imshow(images[i].reshape((h, w)), cmap=pl.cm.gray)
        pl.title(titles[i], size=12)
        pl.xticks(())
        pl.yticks(())
        
# plot the result of the prediction on a portion of the test set

def title(y_pred, y_test, target_names, i):
    pred_name = target_names[y_pred[i]].rsplit(' ', 1)[-1]
    true_name = target_names[y_test[i]].rsplit(' ', 1)[-1]
    return 'predicted: %s\ntrue:      %s' % (pred_name, true_name)

In [53]:
# Compute a PCA (eigenfaces) on the face dataset (treated as unlabeled
# dataset): unsupervised feature extraction / dimensionality reduction

for i in [10, 15, 25, 50, 100, 250]:
    print '\n', '-' * 45, '\n'
    print "Computing eigenfaces with ", i, " principal components"
    print '\n', '-' * 45
    n_components = i
    computeEigenfaces()


--------------------------------------------- 

Computing eigenfaces with  10  principal components

---------------------------------------------
Extracting the top 10 eigenfaces from 966 faces
done in 0.106s
Projecting the input data on the eigenfaces orthonormal basis
done in 0.017s
Fitting the classifier to the training set
done in 56.843s
Best estimator found by grid search:
SVC(C=1000.0, cache_size=200, class_weight='balanced', coef0=0.0,
  decision_function_shape=None, degree=3, gamma=0.1, kernel='rbf',
  max_iter=-1, probability=False, random_state=None, shrinking=True,
  tol=0.001, verbose=False)
Predicting the people names on the testing set
done in 0.011s
                   precision    recall  f1-score   support

     Ariel Sharon       0.10      0.15      0.12        13
     Colin Powell       0.44      0.53      0.48        60
  Donald Rumsfeld       0.27      0.37      0.31        27
    George W Bush       0.67      0.59      0.63       146
Gerhard Schroeder       0.18      0.20      0.19        25
      Hugo Chavez       0.29      0.13      0.18        15
       Tony Blair       0.48      0.39      0.43        36

      avg / total       0.49      0.47      0.48       322

[[ 2  6  1  3  1  0  0]
 [ 8 32  3 10  3  1  3]
 [ 2  8 10  6  0  0  1]
 [ 6 20 10 86 14  2  8]
 [ 0  1  4 10  5  2  3]
 [ 1  2  3  7  0  2  0]
 [ 1  3  6  7  5  0 14]]

--------------------------------------------- 

Computing eigenfaces with  15  principal components

---------------------------------------------
Extracting the top 15 eigenfaces from 966 faces
done in 0.107s
Projecting the input data on the eigenfaces orthonormal basis
done in 0.021s
Fitting the classifier to the training set
done in 19.946s
Best estimator found by grid search:
SVC(C=5000.0, cache_size=200, class_weight='balanced', coef0=0.0,
  decision_function_shape=None, degree=3, gamma=0.001, kernel='rbf',
  max_iter=-1, probability=False, random_state=None, shrinking=True,
  tol=0.001, verbose=False)
Predicting the people names on the testing set
done in 0.012s
                   precision    recall  f1-score   support

     Ariel Sharon       0.27      0.46      0.34        13
     Colin Powell       0.64      0.70      0.67        60
  Donald Rumsfeld       0.47      0.56      0.51        27
    George W Bush       0.87      0.71      0.78       146
Gerhard Schroeder       0.50      0.60      0.55        25
      Hugo Chavez       0.44      0.47      0.45        15
       Tony Blair       0.54      0.56      0.55        36

      avg / total       0.68      0.65      0.66       322

[[  6   5   1   1   0   0   0]
 [  9  42   4   3   2   0   0]
 [  3   2  15   6   1   0   0]
 [  3  12   9 103   6   4   9]
 [  0   0   0   3  15   2   5]
 [  0   2   0   1   2   7   3]
 [  1   3   3   2   4   3  20]]

--------------------------------------------- 

Computing eigenfaces with  25  principal components

---------------------------------------------
Extracting the top 25 eigenfaces from 966 faces
done in 0.120s
Projecting the input data on the eigenfaces orthonormal basis
done in 0.021s
Fitting the classifier to the training set
done in 8.857s
Best estimator found by grid search:
SVC(C=1000.0, cache_size=200, class_weight='balanced', coef0=0.0,
  decision_function_shape=None, degree=3, gamma=0.01, kernel='rbf',
  max_iter=-1, probability=False, random_state=None, shrinking=True,
  tol=0.001, verbose=False)
Predicting the people names on the testing set
done in 0.011s
                   precision    recall  f1-score   support

     Ariel Sharon       0.47      0.62      0.53        13
     Colin Powell       0.76      0.88      0.82        60
  Donald Rumsfeld       0.56      0.52      0.54        27
    George W Bush       0.85      0.84      0.84       146
Gerhard Schroeder       0.59      0.52      0.55        25
      Hugo Chavez       0.77      0.67      0.71        15
       Tony Blair       0.69      0.61      0.65        36

      avg / total       0.75      0.75      0.75       322

[[  8   3   1   1   0   0   0]
 [  3  53   2   1   0   0   1]
 [  3   2  14   7   1   0   0]
 [  2   8   6 122   5   1   2]
 [  0   0   1   6  13   0   5]
 [  0   2   0   1   0  10   2]
 [  1   2   1   5   3   2  22]]

--------------------------------------------- 

Computing eigenfaces with  50  principal components

---------------------------------------------
Extracting the top 50 eigenfaces from 966 faces
done in 0.102s
Projecting the input data on the eigenfaces orthonormal basis
done in 0.017s
Fitting the classifier to the training set
done in 6.367s
Best estimator found by grid search:
SVC(C=1000.0, cache_size=200, class_weight='balanced', coef0=0.0,
  decision_function_shape=None, degree=3, gamma=0.01, kernel='rbf',
  max_iter=-1, probability=False, random_state=None, shrinking=True,
  tol=0.001, verbose=False)
Predicting the people names on the testing set
done in 0.022s
                   precision    recall  f1-score   support

     Ariel Sharon       0.69      0.69      0.69        13
     Colin Powell       0.78      0.88      0.83        60
  Donald Rumsfeld       0.71      0.56      0.63        27
    George W Bush       0.86      0.92      0.89       146
Gerhard Schroeder       0.71      0.60      0.65        25
      Hugo Chavez       0.79      0.73      0.76        15
       Tony Blair       0.83      0.67      0.74        36

      avg / total       0.81      0.81      0.81       322

[[  9   2   2   0   0   0   0]
 [  1  53   1   4   0   0   1]
 [  1   4  15   6   1   0   0]
 [  1   5   2 134   1   1   2]
 [  0   0   1   5  15   2   2]
 [  0   2   0   1   1  11   0]
 [  1   2   0   6   3   0  24]]

--------------------------------------------- 

Computing eigenfaces with  100  principal components

---------------------------------------------
Extracting the top 100 eigenfaces from 966 faces
done in 0.149s
Projecting the input data on the eigenfaces orthonormal basis
done in 0.021s
Fitting the classifier to the training set
done in 11.962s
Best estimator found by grid search:
SVC(C=1000.0, cache_size=200, class_weight='balanced', coef0=0.0,
  decision_function_shape=None, degree=3, gamma=0.005, kernel='rbf',
  max_iter=-1, probability=False, random_state=None, shrinking=True,
  tol=0.001, verbose=False)
Predicting the people names on the testing set
done in 0.036s
                   precision    recall  f1-score   support

     Ariel Sharon       0.64      0.69      0.67        13
     Colin Powell       0.79      0.88      0.83        60
  Donald Rumsfeld       0.77      0.63      0.69        27
    George W Bush       0.87      0.94      0.90       146
Gerhard Schroeder       0.90      0.72      0.80        25
      Hugo Chavez       0.89      0.53      0.67        15
       Tony Blair       0.84      0.75      0.79        36

      avg / total       0.84      0.84      0.83       322

[[  9   0   2   2   0   0   0]
 [  0  53   0   5   0   1   1]
 [  3   2  17   5   0   0   0]
 [  1   6   1 137   0   0   1]
 [  0   1   1   3  18   0   2]
 [  0   3   0   2   1   8   1]
 [  1   2   1   4   1   0  27]]

--------------------------------------------- 

Computing eigenfaces with  250  principal components

---------------------------------------------
Extracting the top 250 eigenfaces from 966 faces
done in 0.363s
Projecting the input data on the eigenfaces orthonormal basis
done in 0.054s
Fitting the classifier to the training set
done in 32.012s
Best estimator found by grid search:
SVC(C=1000.0, cache_size=200, class_weight='balanced', coef0=0.0,
  decision_function_shape=None, degree=3, gamma=0.001, kernel='rbf',
  max_iter=-1, probability=False, random_state=None, shrinking=True,
  tol=0.001, verbose=False)
Predicting the people names on the testing set
done in 0.112s
                   precision    recall  f1-score   support

     Ariel Sharon       0.59      0.77      0.67        13
     Colin Powell       0.72      0.87      0.79        60
  Donald Rumsfeld       0.72      0.67      0.69        27
    George W Bush       0.90      0.88      0.89       146
Gerhard Schroeder       0.82      0.72      0.77        25
      Hugo Chavez       0.78      0.47      0.58        15
       Tony Blair       0.83      0.81      0.82        36

      avg / total       0.82      0.81      0.81       322

[[ 10   0   2   1   0   0   0]
 [  1  52   3   3   0   1   0]
 [  3   2  18   2   0   0   2]
 [  2  11   2 128   1   1   1]
 [  0   2   0   3  18   0   2]
 [  0   4   0   2   1   7   1]
 [  1   1   0   3   2   0  29]]

In [43]:
prediction_titles = [title(y_pred, y_test, target_names, i)
                         for i in range(y_pred.shape[0])]

plot_gallery(X_test, prediction_titles, h, w)



In [44]:
# plot the gallery of the most significative eigenfaces

eigenface_titles = ["eigenface %d" % i for i in range(eigenfaces.shape[0])]
plot_gallery(eigenfaces, eigenface_titles, h, w)

pl.show()



In [45]:
pca.explained_variance_ratio_


Out[45]:
array([ 0.19346474,  0.15116931,  0.07083688,  0.05952028,  0.05157574,
        0.02887213,  0.02514474,  0.02176463,  0.0201937 ,  0.01902118,
        0.01682174,  0.01580626,  0.01223351,  0.01087937,  0.01064428,
        0.00979671,  0.00892415,  0.00854861,  0.00835728,  0.00722645,
        0.0069658 ,  0.00653871,  0.00639547,  0.0056132 ,  0.00531102,
        0.00520167,  0.00507469,  0.00484211,  0.00443586,  0.0041782 ,
        0.00393684,  0.00381711,  0.00356077,  0.00351197,  0.00334554,
        0.00329936,  0.00314637,  0.00296207,  0.00290131,  0.00284712,
        0.00279984,  0.00267544,  0.00259903,  0.00258378,  0.00240921,
        0.00238992,  0.0023542 ,  0.00222581,  0.00217505,  0.00216559,
        0.00209063,  0.00205427,  0.00200421,  0.00197374,  0.00193836,
        0.00188752,  0.00180161,  0.00178887,  0.00174822,  0.00173047,
        0.00165645,  0.00162943,  0.00157416,  0.00153416,  0.00149965,
        0.00147248,  0.00143907,  0.00141871,  0.00139683,  0.00138136,
        0.00133992,  0.0013316 ,  0.00128791,  0.00125579,  0.00124233,
        0.00121852,  0.00120941,  0.00118278,  0.00115082,  0.00113637,
        0.00112584,  0.00111595,  0.00109367,  0.00107116,  0.00105647,
        0.00104313,  0.00102371,  0.00101671,  0.00099745,  0.00096303,
        0.00094186,  0.00091912,  0.00091247,  0.00089121,  0.00087122,
        0.00086184,  0.00084263,  0.00083822,  0.00082796,  0.00080223,
        0.00078577,  0.00078122,  0.00075587,  0.00075114,  0.00074598,
        0.00073313,  0.00072898,  0.00071414,  0.00070431,  0.00069459,
        0.00066695,  0.000662  ,  0.00065307,  0.00063473,  0.00063451,
        0.00062274,  0.00061385,  0.00060799,  0.00059879,  0.00059017,
        0.00057828,  0.00057157,  0.00056324,  0.00055927,  0.00054464,
        0.0005364 ,  0.00052823,  0.00051678,  0.00050816,  0.00050589,
        0.00050298,  0.000489  ,  0.00048116,  0.00047171,  0.0004635 ,
        0.00045969,  0.00045613,  0.00044617,  0.00044104,  0.00043225,
        0.00042921,  0.00042433,  0.00041861,  0.00041253,  0.00040052,
        0.00039732,  0.00039123,  0.00038892,  0.00037575,  0.00037328])

In [ ]: