In [1]:
import sklearn

In [2]:
from sklearn.datasets import make_regression
from sklearn.linear_model import SGDRegressor

import pandas as pd
import numpy as np

In [3]:
# Synthetic regression problem; seed it so the notebook is reproducible
# under Restart & Run All (the original run was unseeded).
X, y = make_regression(random_state=42)
pdf = pd.DataFrame(X)
# Name columns c0..c{n-1}; derive the count from the data instead of
# hard-coding 100.
pdf.columns = ['c{}'.format(i) for i in range(X.shape[1])]

In [4]:
X.shape  # (n_samples, n_features) -- make_regression defaults to 100 x 100


Out[4]:
(100, 100)

In [5]:
# Split the frame into two disjoint 50-column halves:
# X1 carries c50..c99, X2 carries c0..c49.
upper_half_cols = ['c{}'.format(i) for i in range(50, 100)]
lower_half_cols = ['c{}'.format(i) for i in range(50)]
X1 = pdf[upper_half_cols]
X2 = pdf[lower_half_cols]

In [103]:
class GraftingRegressor(SGDRegressor):
    """SGDRegressor supporting "grafting": successive ``partial_fit`` calls
    may present additional trailing features, which are grafted into
    ``coef_`` and then screened -- new coefficients whose magnitude falls
    below ``reg_penalty`` are pruned and their columns ignored thereafter.

    Attributes
    ----------
    filter_cols : list of int
        Flat list of absolute column indices (in the widest X seen so far)
        that were pruned; ``_fit_columns`` masks them out of every input.
    base_shape : int or None
        Number of coefficients before the most recent grafting expansion.
    reg_penalty : float
        Magnitude threshold for discarding newly-grafted coefficients;
        defaults to ``l1_ratio`` when not supplied.
    """

    def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
                 l1_ratio=0.15, fit_intercept=True, max_iter=None, tol=None,
                 shuffle=True, verbose=0, epsilon=0.1,
                 random_state=None, learning_rate="invscaling", eta0=0.01,
                 power_t=0.25, warm_start=False, average=False, n_iter=None,
                 reg_penalty=None):
        super(GraftingRegressor, self).__init__(loss=loss, penalty=penalty,
                                                alpha=alpha, l1_ratio=l1_ratio,
                                                fit_intercept=fit_intercept,
                                                max_iter=max_iter, tol=tol,
                                                shuffle=shuffle,
                                                verbose=verbose,
                                                epsilon=epsilon,
                                                random_state=random_state,
                                                learning_rate=learning_rate,
                                                eta0=eta0, power_t=power_t,
                                                warm_start=warm_start,
                                                average=average, n_iter=n_iter)
        self.filter_cols = []  # flat list of pruned absolute column indices
        self.base_shape = None
        self.reg_penalty = reg_penalty if reg_penalty is not None else l1_ratio

    def _fit_columns(self, X, return_x=True):
        """Drop previously-pruned columns from ``X``.

        Parameters
        ----------
        X : pandas DataFrame or 2-D ndarray
        return_x : bool
            If False, return only the boolean keep-mask rather than the
            filtered ``X``.
        """
        import pandas
        # BUGFIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin bool is the documented replacement.
        bool_mask = np.ones((X.shape[1],), dtype=bool)
        if len(self.filter_cols) == 0:
            return X if return_x else bool_mask
        bool_mask[self.filter_cols] = False
        if not return_x:
            return bool_mask
        if isinstance(X, pandas.core.frame.DataFrame):
            return X[X.columns[bool_mask]]
        return X[:, bool_mask]

    def _reg_penalty(self, tot_new_feats, base_size):
        """Prune the last ``tot_new_feats`` coefficients whose magnitude is
        below ``self.reg_penalty`` and record their absolute column indices.

        ``base_size`` is the total number of columns seen before this fit
        (kept + pruned), i.e. the absolute index of the first new feature.
        """
        new_coef = self.coef_[-tot_new_feats:]
        keep = np.abs(new_coef) >= self.reg_penalty
        remove_cols = np.flatnonzero(~keep)
        # surviving coefficients = old coefficients + kept new ones
        self.coef_ = np.concatenate([self.coef_[:-tot_new_feats],
                                     new_coef[keep]])
        # BUGFIX: the original appended one *nested list* of 1-element
        # arrays per call, so len(self.filter_cols) counted calls rather
        # than pruned columns -- corrupting base_size on any later
        # partial_fit. Store flat Python ints instead.
        self.filter_cols.extend(int(base_size + c) for c in remove_cols)

    def _partial_grafting_fit(self, X_, y):
        """Expand ``coef_`` with zero-initialized slots for the new trailing
        features present in ``X_`` (after dropping already-pruned columns).
        ``y`` is accepted for signature symmetry but not used here.
        """
        # remember how many coefficients existed before this expansion
        self.base_shape = self.coef_.shape[0]

        X = self._fit_columns(X_)
        n_samples, n_features = X.shape
        coef_list = np.zeros(n_features, dtype=np.float64, order="C")
        # existing coefficients occupy the leading slots; new features
        # start at zero
        coef_list[:self.coef_.shape[0]] = self.coef_.copy()
        self.coef_ = coef_list.copy()

    def partial_fit(self, X, y, sample_weight=None):
        """Incrementally fit on ``X``/``y``; columns beyond those seen so
        far are grafted in and immediately screened by ``reg_penalty``.
        """
        # total columns seen so far = pruned + currently-kept coefficients
        base_size = len(self.filter_cols) + self.coef_.shape[0]
        tot_new_feats = X.shape[1] - base_size
        self._partial_grafting_fit(X, y)
        # BUGFIX: drop pruned columns before the SGD update (the original
        # passed raw X, whose width mismatches coef_ once filter_cols is
        # non-empty) and forward the caller's sample_weight (the original
        # hard-coded sample_weight=None, silently dropping it).
        X_kept = self._fit_columns(X)
        super(GraftingRegressor, self).partial_fit(X_kept, y,
                                                   sample_weight=sample_weight)
        # screen the newly grafted coefficients, if any were added
        if tot_new_feats > 0:
            self._reg_penalty(tot_new_feats, base_size)
        return self

    def predict(self, X):
        """Predict after dropping pruned columns from ``X``."""
        X = self._fit_columns(X)
        return super(GraftingRegressor, self).predict(X)

In [104]:
# NOTE(review): scratch cell relying on hidden kernel state -- `model` is
# only created in the NEXT cell (execution count 104 precedes 105), so this
# fails under Restart & Run All. It concatenates the same two coefficient
# values twice; safe to delete.
model.coef_[-5:][[0,3]].tolist() + model.coef_[-5:][[0,3]].tolist()


Out[104]:
[-1.2303520088786952,
 -0.29755658847192473,
 -1.2303520088786952,
 -0.29755658847192473]

In [105]:
# Initial fit on the upper 50 features (c50..c99) only.
# NOTE(review): no random_state is set, so coefficients differ run to run.
model = GraftingRegressor(max_iter=1000)
model.fit(X1, y)


Out[105]:
GraftingRegressor(alpha=0.0001, average=False, epsilon=0.1, eta0=0.01,
         fit_intercept=True, l1_ratio=0.15, learning_rate='invscaling',
         loss='squared_loss', max_iter=1000, n_iter=None, penalty='l2',
         power_t=0.25, random_state=None, reg_penalty=0.15, shuffle=True,
         tol=None, verbose=0, warm_start=False)

In [106]:
len(model.coef_)  # 50 -- one coefficient per column of X1


Out[106]:
50

In [107]:
# Graft the full 100-column frame onto the 50-feature model: coef_ is
# expanded for the 50 new columns, then coefficients with magnitude below
# reg_penalty are pruned.
model.partial_fit(pdf, y)


Out[107]:
GraftingRegressor(alpha=0.0001, average=False, epsilon=0.1, eta0=0.01,
         fit_intercept=True, l1_ratio=0.15, learning_rate='invscaling',
         loss='squared_loss', max_iter=1000, n_iter=None, penalty='l2',
         power_t=0.25, random_state=None, reg_penalty=0.15, shuffle=True,
         tol=None, verbose=0, warm_start=False)

In [108]:
len(model.coef_)  # 97 -- 100 total features minus 3 pruned by reg_penalty


Out[108]:
97

In [109]:
# Predict on the full frame; _fit_columns drops the pruned columns first.
model.predict(pdf)


Out[109]:
array([  -8.54434025,  253.67110568,  173.96532587,  121.50769307,
         34.05225317,  -70.32430236,  159.74561303,  -44.71723753,
       -156.98893749,  170.30902433,  -52.37927023,  250.22915706,
       -127.50252436,  -37.90946018,  416.65513397,  -37.25241649,
         10.03194315,  108.76249591,  130.89598554,  256.74132194,
         18.00703619,  300.65763551,  -39.18861335,   95.85562179,
        -16.85161846,  -13.44853864,   17.8401801 ,  243.40463137,
        -37.51576435,  158.6283454 ,  141.07229213, -112.72008839,
        141.21216558, -217.4730374 , -165.97622106,   40.95326168,
         71.16952663, -242.50460099, -130.06545394, -202.17815434,
         -7.16533651, -162.72823365,  170.03835376,  117.36209976,
         43.40393648,   43.72662798,  122.57368727,  113.79998753,
          6.53690737, -310.48511863,  -40.67756062,  196.48324148,
        -58.14588667,   96.94196195, -168.3004843 ,  -68.40367592,
         88.51250958,  -65.82738478, -118.67552562,   38.81768855,
        182.53923576, -162.22492556,   76.39392767,  124.53825151,
       -175.4246067 , -183.99692088,  -16.22102002,  241.21642751,
       -153.31815756, -203.46955645,   64.29950946,   96.0132368 ,
        117.67466275, -100.2667357 ,  227.84059597,  -22.74215652,
        -96.48048892,  -57.87322888, -202.2944065 ,  -28.01433959,
        148.26935145, -348.16142807,   33.60294132, -203.200728  ,
        -44.67017669,   -4.93240092,   92.23779673,   97.6960918 ,
        324.31044433,   14.3910425 , -145.78923224,  -12.10674884,
       -161.66390725, -182.88296594,   56.75854608, -177.52837694,
         -6.9329069 ,   99.21266329,  198.4797368 ,   43.07216191])