In [ ]:
import sys
print(sys.version)

In [ ]:
from functools import partial
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
%matplotlib inline
import time

import pandas as pd
import seaborn as sns

In [ ]:
import sys
sys.path.append('../code/')

In [ ]:
from least_squares_sgd import LeastSquaresSGD
from rbf_kernel import RBFKernel
from mnist_helpers import mnist_training, mnist_testing
from hyperparameter_explorer import HyperparameterExplorer

In [ ]:
X_train, y_train = mnist_training(shuffled=True)

X_test, y_test = mnist_testing(shuffled=False)

In [ ]:
# Work with a small subset of the MNIST training data so experiments run quickly.
data_points = 1000
X_train, y_train = X_train[0:data_points], y_train[0:data_points]

In [ ]:
h = HyperparameterExplorer(X=X_train, y=y_train, score_name='square loss',
                           primary_hyperparameter='sigma',
                           classifier=LeastSquaresSGD,
                           use_prev_best_weights=True,
                           test_X=X_test, test_y=y_test)

In [ ]:
h.train_model(model_kwargs={'delta_percent':0.01, 'max_epochs':200}, kernel_kwargs={})

In [ ]:
h.models[1].sigma

In [ ]:
h.models[1].plot_loss_and_eta()

In [ ]:
h.models[1].plot_w_hat_history()

In [ ]:
h.train_model(model_kwargs={'delta_percent':0.01, 'max_epochs':200},
              kernel_kwargs={'sigma': 1e5})

In [ ]:
h.models[2].plot_loss_and_eta()

In [ ]:
h.models[2].plot_w_hat_history()

In [ ]:
assert False  # intentional stop: keeps "Run All" from executing the exploratory cells below

In [ ]:
h.summary

In [ ]:
h.train_model(model_kwargs={'eta0':100, 'delta_percent':0.01, 'max_epochs':500},
              kernel_kwargs={'sigma': 1e3})

In [ ]:
h.best('model')

In [ ]:
h.best('score')

In [ ]:
h.summary

In [ ]:
h.plot_fits()

In [ ]:
assert False  # intentional stop: keeps "Run All" from executing the exploratory cells below

In [ ]:
model = LeastSquaresSGD(X=X_train, y=y_train, batch_size=100, kernel=RBFKernel,
                        eta0=1e3, verbose=True,
                        progress_monitoring_freq=2000, max_epochs=500)

In [ ]:
model.eta0

In [ ]:
model.run()

In [ ]:
model.results

In [ ]:
model.plot_01_loss()

In [ ]:
model.plot_01_loss(logx=True)

In [ ]:
model.plot_square_loss(logx=False)

In [ ]:
# Manually plot the per-example 0/1 training loss against SGD step,
# skipping the very first step.
fig, ax = plt.subplots(1, 1, figsize=(4, 3))
plot_data = model.results[model.results['step'] > 1]
plot_x = 'step'
plot_y = 'training (0/1 loss)/N'
colors = ['gray']
plt.plot(plot_data[plot_x], plot_data[plot_y],
         linestyle='--', marker='o',
         color=colors[0])

In [ ]:
model.plot_w_hat_history()

In [ ]:
model.results.columns

In [ ]:
model.plot_loss_and_eta()

Should diverge: eta0 here is set to ten times the value that worked for the model above, so the square loss is expected to blow up rather than settle.


In [ ]:
model_diverge = LeastSquaresSGD(X=X_train, y=y_train, batch_size=2,
                                eta0=model.eta0 * 10, kernel=RBFKernel,
                                progress_monitoring_freq=100, max_epochs=500)
model_diverge.run()

In [ ]:
model_diverge.plot_square_loss()
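
Why the divergence: for the squared loss, gradient descent is stable only while the step size stays below 2 divided by the largest eigenvalue of the Hessian; past that threshold every step overshoots and the loss grows geometrically. The cell below is a minimal, self-contained NumPy sketch of that effect on a toy full-batch least-squares problem; it does not use LeastSquaresSGD or the RBF kernel, and for mini-batch SGD the threshold is only approximate.

In [ ]:
import numpy as np

np.random.seed(0)
n, d = 100, 5
X_toy = np.random.randn(n, d)
y_toy = X_toy @ np.random.randn(d)

# The Hessian of (1/n) * ||Xw - y||^2 is (2/n) * X^T X; gradient descent
# converges only for step sizes below 2 / lambda_max of that Hessian.
lam_max = np.linalg.eigvalsh(2.0 / n * X_toy.T @ X_toy).max()

for eta in [0.9 * (2 / lam_max), 1.1 * (2 / lam_max)]:
    w = np.zeros(d)
    for _ in range(200):
        grad = 2.0 / n * X_toy.T @ (X_toy @ w - y_toy)
        w -= eta * grad
    print('eta / (2 / lam_max) = {:.1f}: final square loss = {:.2e}'.format(
        eta * lam_max / 2, np.mean((X_toy @ w - y_toy) ** 2)))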