In [1]:
import numpy as np

from bokeh.plotting import HBox, figure, show, output_file

from sklearn import preprocessing

import sklearn
import OVFM.Model as md
import OVFM.FeatureMap as fm
import OVFM.Risk as rsk
import OVFM.LearningRate as lr
import OVFM.DataGeneration as dg
import OVFM.SGD as sgd

In [2]:
def generate_features_train( nb_points, nb_gaussian, d, sigma = None ):
    
    # Number of points drawn from each Gaussian component
    nb_points = nb_points // nb_gaussian
    
    if sigma is None:
        # Draw a random symmetric positive semi-definite covariance for each component,
        # normalised in spectral norm
        sigma = []
        for i in xrange( nb_gaussian ):
            temp = 2 * np.random.rand( d, d ) - 1
            temp = np.dot( temp, temp.T )
            sigma.append( temp / np.linalg.norm( temp, 2 ) )

    # Generate centroids
    centers = [ 10 * np.random.rand( d ) for i in xrange( nb_gaussian ) ]
    
    # Draw the points and rescale each feature to [ -1, 1 ]
    gen = [ np.random.multivariate_normal( centers[ i ], sigma[ i ], nb_points ) for i in xrange( nb_gaussian ) ]
    return sklearn.preprocessing.MinMaxScaler( ( -1, 1 ) ).fit_transform( np.concatenate( gen, axis = 0 ) )
    
def generate_features_test( nb_points, d ):
    # int( ) guards against float counts such as the 1e4 default used below
    return sklearn.preprocessing.MinMaxScaler( ( -1, 1 ) ).fit_transform( np.random.rand( int( nb_points ), d ) )
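
A quick sanity check of the feature generator (illustrative; the sizes below are arbitrary). Since each column is rescaled by the MinMaxScaler, the minimum and maximum are exactly -1 and 1:

In [ ]:
X_demo = generate_features_train( 1000, 5, 2 )
print X_demo.shape, X_demo.min( ), X_demo.max( )   # expected: (1000, 2) -1.0 1.0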

In [3]:
def generate_targets( model, X ):
    model.coefs = np.random.normal( 0, 1, model.coefs.shape )
    return sklearn.preprocessing.MinMaxScaler( ( -1, 1 ) ).fit_transform( model( X ) )

In [11]:
def MSE( p, y ):
    return np.mean( ( p - y ) ** 2 )

def RMSE( p, y ):
    return np.sqrt( MSE( p, y ) )

def MSE_generalisation( model_gen, model_learn, nb_points = 1e4 ):
    # Note: reads the input dimension d from the enclosing scope
    X_test = generate_features_test( nb_points, d )
    y_test = sklearn.preprocessing.MinMaxScaler( ( -1, 1 ) ).fit_transform( model_gen( X_test ) )
    return MSE( y_test, model_learn( X_test ) ), X_test, y_test

def RMSE_generalisation( model_gen, model_learn, nb_points = 1e4 ):
    mse, X, y = MSE_generalisation( model_gen, model_learn, nb_points )
    return np.sqrt( mse ), X, y
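
Note that MSE_generalisation rescales the generating model's outputs with the same MinMaxScaler( ( -1, 1 ) ) convention used in generate_targets, so the test targets live on the same scale as the training ones.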

In [ ]:
def select_eta0( model, risk, eta0_grid, X, y, lbda = 0. ):
    # Grid search over the initial learning rate eta0, keeping the value
    # with the best training MSE (lbda was a global in the original cell)
    best_err = np.inf
    best_eta0 = None
    for eta0 in eta0_grid:
        lc = lr.InvScaling( eta0, lbda, 1 )
        lb = lr.Constant( 0 ) # no bias
        opt = sgd.SGD( risk, 1.0, lc, lb, 10, 0 )
        model.reset( )
        opt.fit( model, X, y )
        err = MSE( model.predict( X ), y )
        if err < best_err:
            best_eta0 = eta0
            best_err = err
            
    return best_eta0, best_err
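
A usage sketch for the selector (illustrative: the grid below is an arbitrary choice, and model_learn_id, X and y refer to objects defined in the cells that follow):

In [ ]:
eta0_grid = np.logspace( -2, 1, 10 )
best_eta0, best_err = select_eta0( model_learn_id, rsk.Ridge( 0 ), eta0_grid, X, y )
print best_eta0, best_err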

In [5]:
# Problem parameters
d = 2             # input dimension
p = 2             # output dimension
D = 1000          # number of random features (feature map dimension)
n = 10000         # number of training points
n_gaussians = 20  # mixture components in the input distribution
gamma = 1         # feature map parameter

# Random symmetric positive semi-definite output metric, normalised in spectral norm
LT = 2 * np.random.rand( p, p ) - 1
LT = np.dot( LT, LT.T )
LT = LT / np.linalg.norm( LT, 2 )

# Generate train database
X = generate_features_train( n, n_gaussians, d )
model_gen = md.Model( fm.DecomposableFF( gamma, d, D, LT ) )
y = generate_targets( model_gen, X )
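
The targets are thus produced by a randomly-drawn decomposable random-feature model whose output metric is LT; the cells below compare learning with the identity metric against learning with the true LT. A quick shape check (illustrative):

In [ ]:
print X.shape, y.shape   # expected: (10000, 2) (10000, 2)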

In [40]:
# Learner with the identity matrix as output metric
L = np.eye( p )
model_learn_id = md.Model( fm.DecomposableFF( 1, d, D, L ) )

In [41]:
eta0 = 1.
lbda = 0.

# Train the identity-metric model by SGD with an inverse-scaling learning rate
risk = rsk.Ridge( 0 )
lc = lr.InvScaling( eta0, lbda, 1 )
lb = lr.Constant( 0 ) # no bias
opt = sgd.SGD( risk, 1.0, lc, lb, 10, 0 )
model_learn_id.reset( )
opt.fit( model_learn_id, X, y )

In [42]:
# Learner with the true output metric LT
L = LT
model_learn_true = md.Model( fm.DecomposableFF( 1, d, D, L ) )

In [43]:
# Train the true-metric model with the same SGD settings
risk = rsk.Ridge( 0 )
lc = lr.InvScaling( eta0, lbda, 1 )
lb = lr.Constant( 0 ) # no bias
opt = sgd.SGD( risk, 1.0, lc, lb, 10, 0 )
model_learn_true.reset( )
opt.fit( model_learn_true, X, y )

In [46]:
print RMSE_generalisation( model_gen, model_learn_id, 1e4 )[ 0 ]
print RMSE_generalisation( model_gen, model_learn_true, 1e4 )[ 0 ]


0.112528926115
0.104816108742
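
With identical training settings, the learner equipped with the true output metric LT generalises slightly better than the one using the identity (RMSE of about 0.105 against about 0.113).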

In [13]:
def scale( X, dim ):
    # Rescale column dim of X to [ 0, 1 ]
    return ( X[ :, dim ] - X[ :, dim ].min( ) ) / ( X[ :, dim ].max( ) - X[ :, dim ].min( ) )

# Colour each point by its input coordinates: red encodes dimension 0, green dimension 1
# (int( ) is required because %x does not accept floats)
colors = [ "#%02x%02x%02x" % ( int( r ), int( g ), 150 ) for r, g in zip( 255 * scale( X, 0 ), 255 * scale( X, 1 ) ) ]

p1 = figure( )
p1.circle( X[ :, 0 ], X[ :, 1 ], radius=0.01, fill_color=colors, fill_alpha=0.6, line_color=None )

p2 = figure( )
p2.circle( y[ :, 0 ], y[ :, 1 ], radius=0.01, fill_color=colors, fill_alpha=0.6, line_color=None )
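
Both scatter plots share the same colouring: the red and green channels encode the two input coordinates (blue is fixed at 150), so colour continuity in the target plot on the right reflects how smoothly the generating model maps inputs to outputs.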

In [7]:
output_file( "color_scatter.html", title="color_scatter.py example", mode="cdn" )
show( HBox( p1, p2 ) )


