In [1]:
import os
import sys
import operator
import numpy as np
import pandas as pd
from scipy import sparse
import xgboost as xgb
import random
from sklearn import model_selection, preprocessing, ensemble
from sklearn.metrics import log_loss
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer

import pickle

import sklearn.cluster

import Levenshtein

from multiprocessing import Pool

In [2]:
train_df = pd.read_pickle('fin-dprep-train.pkl')
test_df = pd.read_pickle('fin-dprep-test.pkl')

features_to_use = pickle.load(open('fin-dprep-flist.pkl', 'rb'))

medium_price = pd.read_pickle('fin-medium-price-r2.pkl')

train_df = pd.merge(train_df, medium_price, left_on='listing_id', right_index=True)
test_df = pd.merge(test_df, medium_price, left_on='listing_id', right_index=True)

In [3]:
adams = pd.read_pickle('features-adams.pkl')

train_df = pd.merge(train_df, adams, left_on='listing_id', right_index=True)
test_df = pd.merge(test_df, adams, left_on='listing_id', right_index=True)

In [4]:
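# Log-scale price residual: positive values mean the listing is priced above
# the medium-price model's prediction loaded above.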
train_df["predicted_price_diff"] = np.log(train_df["price"]) - np.log(train_df["predicted_price"])
test_df["predicted_price_diff"] = np.log(test_df["price"]) - np.log(test_df["predicted_price"])

In [5]:
class MeansProcessor:
    """Per-group target statistics (count, mean, std of `tgt`), fitted on one
    set of rows and attached as new feature columns to another."""

    def __init__(self, key, outkey=None, tgt='interest_cat'):
        self.key = key
        self.outkey = key if outkey is None else outkey
        
        self.count = {}
        self.means = {}
        self.std = {}
        self.global_means = 0
        
        self.tgt = tgt
        
        self.outkeys = [self.outkey + '_level', self.outkey + '_level_std']
        
    def fit(self, df):
        self.global_means = df[self.tgt].mean()
            
        for key_value, group in df.groupby(self.key, sort=False):
            self.count[key_value] = len(group)

            if len(group) < 1:  # guard against empty groups (groupby never yields them)
                self.means[key_value] = np.nan
                self.std[key_value] = np.nan
            else:
                self.means[key_value] = np.mean(group[self.tgt])
                self.std[key_value] = np.std(group[self.tgt])
            
    def predict(self, df):
        for l in self.outkeys:
            df[l] = np.nan  # NaN for groups unseen in fit

        df[self.outkey + '_count'] = 0

        for key_value, group in df.groupby(self.key, sort=False):
            if key_value == 0:  # skip key 0 (placeholder/missing id)
                continue

            if key_value in self.means:
                df.loc[group.index, self.outkey + '_count'] = self.count[key_value]
                df.loc[group.index, self.outkey + '_level'] = self.means[key_value]
                df.loc[group.index, self.outkey + '_level_std'] = self.std[key_value]

        return df
    
    def get_features(self):
        return self.outkeys.copy() + [self.outkey + '_count']
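
# A minimal usage sketch (toy data, hypothetical, not part of the pipeline):
# the per-group mean/std/count of the target become new columns on predict.
_demo = pd.DataFrame({'manager_id': ['a', 'a', 'b'],
                      'interest_cat': [0, 2, 1]})
_mp = MeansProcessor('manager_id', 'mgr_demo')
_mp.fit(_demo)
_demo = _mp.predict(_demo.copy())  # adds mgr_demo_level (1.0 for 'a', the mean
                                   # of [0, 2]), mgr_demo_level_std, mgr_demo_count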

# I kept the same index randomization (with a fixed seed) so I could validate
# this code against the original...

target_num_map = {'low':0, 'medium':1, 'high':2}
train_y = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

def proc_fold(fold):
    train_index = fold[0]
    test_index = fold[1]
    
    cv_train = train_df.iloc[train_index]
    cv_valid = train_df.iloc[test_index][['interest_level', 'manager_id', 'building_id']]
    cv_test = test_df.copy()
    
    m_build = MeansProcessor('building_id', 'building_sort')
    m_build.fit(cv_train)
    cv_valid = m_build.predict(cv_valid)
    cv_test = m_build.predict(cv_test)

    m_mgr = MeansProcessor('manager_id', 'manager_sort')
    m_mgr.fit(cv_train)
    cv_valid = m_mgr.predict(cv_valid)
    cv_test = m_mgr.predict(cv_test)

    m_comb = MeansProcessor(['building_id', 'manager_id'], 'mb_comb')
    m_comb.fit(cv_train)
    cv_valid = m_comb.predict(cv_valid)
    cv_test = m_comb.predict(cv_test)

    return cv_train, cv_valid, cv_test

kf = model_selection.StratifiedKFold(n_splits=5, shuffle=True, random_state=2016)
folds = [(k[0], k[1]) for k in kf.split(list(range(train_df.shape[0])), train_y)]

# Cache the fold-wise group features; recompute them in parallel if no cache exists.
try:
    rv = pickle.load(open('0420-model-groupfeatures.pkl', 'rb'))
except (IOError, EOFError):
    with Pool(5) as pool:
        rv = pool.map(proc_fold, folds)

    pickle.dump(rv, open('0420-model-groupfeatures.pkl', 'wb'))

# dummy processors, used only to obtain the output feature names
m_build = MeansProcessor('building_id', 'building_sort')
m_mgr = MeansProcessor('manager_id', 'manager_sort')
m_comb = MeansProcessor(['building_id', 'manager_id'], 'mb_comb')

group_features = m_build.get_features() + m_mgr.get_features() + m_comb.get_features()

cv_test = []
for r in rv:
    cv_test.append(test_df.merge(r[2][group_features], left_index=True, right_index=True))

cv_allvalid = pd.concat([r[1] for r in rv])

train_df = train_df.merge(cv_allvalid[group_features], left_index=True, right_index=True)
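
# Consistency check (a sketch, not in the original run): the stratified folds
# partition the training rows, so the concatenated out-of-fold frame should
# cover the training set exactly once.
assert len(cv_allvalid) == len(train_df)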

In [6]:
train_ids = []
val_ids = []

for dev_index, val_index in kf.split(range(train_df.shape[0]), train_df.interest_cat):
    train_ids.append(train_df.iloc[dev_index].listing_id.values)
    val_ids.append(train_df.iloc[val_index].listing_id.values)

In [7]:
adams_features = ['num_rot15_X', 'num_rot15_Y', 'num_rot30_X', 'num_rot30_Y', 'num_rot45_X', 'num_rot45_Y', 'num_rot60_X', 'num_rot60_Y', 'num_rho', 'num_phi', 'num_cap_share', 'num_nr_of_lines', 'num_redacted', 'num_email', 'num_phone_nr']

In [8]:
#fl = features_to_use + m_build.get_features() + m_mgr.get_features() + m_comb.get_features() + tfidf_fn

fl = features_to_use + group_features + adams_features  # '+' already copies the lists

#fl.remove('price')
#fl.remove('price_t')
#fl.remove('price_per_room')
fl.append('predicted_price')
fl.append('predicted_price_diff')

fl.append('manager_lazy_rate')

fl.append('density_exp01')

In [15]:
def run3_to_stackdf(run):
    """Combine out-of-fold train predictions with fold-averaged test
    predictions into one frame indexed by listing_id, for stacking;
    also return the per-fold test prediction frames."""
    df_testpreds3 = pd.DataFrame(run[2].mean(axis=0))
    df_testpreds3.columns = ['low', 'medium', 'high']
    df_testpreds3['listing_id'] = test_df.listing_id

    df_allpreds3 = pd.concat([run[1][['low', 'medium', 'high', 'listing_id']], df_testpreds3])

    df_allpreds3.sort_values('listing_id', inplace=True)
    df_allpreds3.set_index('listing_id', inplace=True)
    
    df_fold = []
    for f in range(run[2].shape[0]):
        df_fold.append(pd.DataFrame(run[2][f]))
        df_fold[-1]['listing_id'] = test_df.listing_id
        df_fold[-1].sort_values('listing_id', inplace=True)
        df_fold[-1].set_index('listing_id', inplace=True)

    return (df_allpreds3, df_fold)

In [16]:
def runXGB(train_X, train_y, test_X, test_y=None, feature_names=None, seed_val=0, num_rounds=4000):
    param = {}
    param['objective'] = 'multi:softprob'
    #param['tree_method'] = 'hist'
    param['eta'] = 0.02
    param['max_depth'] = 6
    param['silent'] = 1
    param['num_class'] = 3
    param['eval_metric'] = "mlogloss"
    param['min_child_weight'] = 1
    param['subsample'] = 0.7
    param['colsample_bytree'] = 0.7
    param['seed'] = seed_val
    #param['base_score'] = [np.mean(train_y == i) for i in [0, 1, 2]]

    plst = list(param.items())
    xgtrain = xgb.DMatrix(train_X, label=train_y)

    if test_y is not None:
        xgtest = xgb.DMatrix(test_X, label=test_y)
        watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
        model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=50, verbose_eval=10)
    else:
        xgtest = xgb.DMatrix(test_X)
        model = xgb.train(plst, xgtrain, num_rounds)

    # best_ntree_limit only exists when early stopping fired; fall back to all trees
    pred_test_y = model.predict(xgtest, ntree_limit=getattr(model, 'best_ntree_limit', 0))
    return pred_test_y, model

In [17]:
def run_cv(train_df, cv_test, kf, features_to_use):
    train_X = train_df[features_to_use]
    train_y = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

    cv_preds = []
    cv_scores = []
    models = []
    test_preds = []
    
    fold = 0

    for dev_index, val_index in kf.split(range(train_X.shape[0]), train_y):

        dev_X, val_X = train_X.iloc[dev_index], train_X.iloc[val_index]
        dev_y, val_y = train_y[dev_index], train_y[val_index]
        preds, model = runXGB(dev_X, dev_y, val_X, val_y)
        models.append(model)

        cv_scores.append(log_loss(val_y, preds))
        print(cv_scores)

        cut_df = train_df.iloc[val_index]
        out_df = pd.DataFrame(preds)
        out_df.columns = ["low", "medium", "high"]
        out_df["listing_id"] = cut_df.listing_id.values
        interest = cut_df.interest_level.apply(lambda x: target_num_map[x])
        out_df['interest_tgt'] = interest.values

        cv_preds.append(out_df)

        xgtest = xgb.DMatrix(cv_test[fold][features_to_use])
        test_preds.append(model.predict(xgtest, ntree_limit=model.best_ntree_limit))
        fold += 1  # advance so each fold uses its own test-side group features

    df_cv = pd.concat(cv_preds)
    print(log_loss(df_cv.interest_tgt, df_cv[['low', 'medium', 'high']]))

    apreds = np.array(test_preds)
    
    return models, df_cv, apreds

In [18]:
rv3 = run_cv(train_df, cv_test, kf, fl)


[0]	train-mlogloss:1.0843	test-mlogloss:1.08452
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.962632	test-mlogloss:0.965063
[20]	train-mlogloss:0.871514	test-mlogloss:0.875962
[30]	train-mlogloss:0.800614	test-mlogloss:0.80751
[40]	train-mlogloss:0.745262	test-mlogloss:0.754446
[50]	train-mlogloss:0.700616	test-mlogloss:0.712114
[60]	train-mlogloss:0.665245	test-mlogloss:0.679158
[70]	train-mlogloss:0.636389	test-mlogloss:0.652703
[80]	train-mlogloss:0.612529	test-mlogloss:0.631245
[90]	train-mlogloss:0.592969	test-mlogloss:0.613856
[100]	train-mlogloss:0.576529	test-mlogloss:0.599517
[110]	train-mlogloss:0.562609	test-mlogloss:0.587732
[120]	train-mlogloss:0.550703	test-mlogloss:0.577969
[130]	train-mlogloss:0.540578	test-mlogloss:0.569841
[140]	train-mlogloss:0.531801	test-mlogloss:0.562976
[150]	train-mlogloss:0.524123	test-mlogloss:0.557312
[160]	train-mlogloss:0.517427	test-mlogloss:0.552572
[170]	train-mlogloss:0.511243	test-mlogloss:0.548362
[180]	train-mlogloss:0.505627	test-mlogloss:0.54462
[190]	train-mlogloss:0.500577	test-mlogloss:0.541516
[200]	train-mlogloss:0.496057	test-mlogloss:0.538659
[210]	train-mlogloss:0.491808	test-mlogloss:0.536176
[220]	train-mlogloss:0.487873	test-mlogloss:0.534084
[230]	train-mlogloss:0.484331	test-mlogloss:0.532278
[240]	train-mlogloss:0.480666	test-mlogloss:0.53036
[250]	train-mlogloss:0.477285	test-mlogloss:0.528804
[260]	train-mlogloss:0.47432	test-mlogloss:0.527401
[270]	train-mlogloss:0.471564	test-mlogloss:0.526083
[280]	train-mlogloss:0.468669	test-mlogloss:0.524847
[290]	train-mlogloss:0.465895	test-mlogloss:0.523686
[300]	train-mlogloss:0.463197	test-mlogloss:0.522611
[310]	train-mlogloss:0.460487	test-mlogloss:0.521429
[320]	train-mlogloss:0.457938	test-mlogloss:0.520591
[330]	train-mlogloss:0.455568	test-mlogloss:0.519834
[340]	train-mlogloss:0.453265	test-mlogloss:0.519078
[350]	train-mlogloss:0.451087	test-mlogloss:0.518366
[360]	train-mlogloss:0.448954	test-mlogloss:0.517783
[370]	train-mlogloss:0.446849	test-mlogloss:0.517062
[380]	train-mlogloss:0.444869	test-mlogloss:0.5165
[390]	train-mlogloss:0.442804	test-mlogloss:0.515907
[400]	train-mlogloss:0.440687	test-mlogloss:0.515336
[410]	train-mlogloss:0.438678	test-mlogloss:0.514888
[420]	train-mlogloss:0.436954	test-mlogloss:0.514387
[430]	train-mlogloss:0.435228	test-mlogloss:0.513875
[440]	train-mlogloss:0.433318	test-mlogloss:0.513475
[450]	train-mlogloss:0.431625	test-mlogloss:0.513108
[460]	train-mlogloss:0.429857	test-mlogloss:0.512725
[470]	train-mlogloss:0.428154	test-mlogloss:0.512363
[480]	train-mlogloss:0.426557	test-mlogloss:0.511999
[490]	train-mlogloss:0.424958	test-mlogloss:0.511635
[500]	train-mlogloss:0.423205	test-mlogloss:0.511248
[510]	train-mlogloss:0.421512	test-mlogloss:0.510888
[520]	train-mlogloss:0.419972	test-mlogloss:0.510564
[530]	train-mlogloss:0.418096	test-mlogloss:0.510237
[540]	train-mlogloss:0.41628	test-mlogloss:0.509821
[550]	train-mlogloss:0.414723	test-mlogloss:0.50958
[560]	train-mlogloss:0.412994	test-mlogloss:0.509304
[570]	train-mlogloss:0.411486	test-mlogloss:0.509035
[580]	train-mlogloss:0.409767	test-mlogloss:0.508842
[590]	train-mlogloss:0.408127	test-mlogloss:0.508621
[600]	train-mlogloss:0.406433	test-mlogloss:0.508357
[610]	train-mlogloss:0.404847	test-mlogloss:0.508142
[620]	train-mlogloss:0.40339	test-mlogloss:0.507952
[630]	train-mlogloss:0.401846	test-mlogloss:0.507808
[640]	train-mlogloss:0.400383	test-mlogloss:0.50764
[650]	train-mlogloss:0.398718	test-mlogloss:0.507481
[660]	train-mlogloss:0.397126	test-mlogloss:0.507334
[670]	train-mlogloss:0.39575	test-mlogloss:0.507187
[680]	train-mlogloss:0.394337	test-mlogloss:0.507069
[690]	train-mlogloss:0.392872	test-mlogloss:0.506936
[700]	train-mlogloss:0.391407	test-mlogloss:0.506771
[710]	train-mlogloss:0.389959	test-mlogloss:0.506608
[720]	train-mlogloss:0.388267	test-mlogloss:0.506379
[730]	train-mlogloss:0.386816	test-mlogloss:0.506181
[740]	train-mlogloss:0.38532	test-mlogloss:0.505946
[750]	train-mlogloss:0.384012	test-mlogloss:0.505938
[760]	train-mlogloss:0.382522	test-mlogloss:0.505894
[770]	train-mlogloss:0.380949	test-mlogloss:0.505814
[780]	train-mlogloss:0.379504	test-mlogloss:0.505735
[790]	train-mlogloss:0.378122	test-mlogloss:0.505645
[800]	train-mlogloss:0.376661	test-mlogloss:0.505621
[810]	train-mlogloss:0.375335	test-mlogloss:0.505439
[820]	train-mlogloss:0.373878	test-mlogloss:0.505299
[830]	train-mlogloss:0.372356	test-mlogloss:0.50525
[840]	train-mlogloss:0.371048	test-mlogloss:0.505213
[850]	train-mlogloss:0.369632	test-mlogloss:0.505144
[860]	train-mlogloss:0.36851	test-mlogloss:0.505121
[870]	train-mlogloss:0.366969	test-mlogloss:0.505027
[880]	train-mlogloss:0.365698	test-mlogloss:0.504924
[890]	train-mlogloss:0.364287	test-mlogloss:0.504829
[900]	train-mlogloss:0.362869	test-mlogloss:0.504778
[910]	train-mlogloss:0.361409	test-mlogloss:0.504671
[920]	train-mlogloss:0.359955	test-mlogloss:0.504559
[930]	train-mlogloss:0.3586	test-mlogloss:0.504512
[940]	train-mlogloss:0.357286	test-mlogloss:0.504529
[950]	train-mlogloss:0.355944	test-mlogloss:0.50434
[960]	train-mlogloss:0.354625	test-mlogloss:0.504263
[970]	train-mlogloss:0.353351	test-mlogloss:0.5042
[980]	train-mlogloss:0.352217	test-mlogloss:0.504188
[990]	train-mlogloss:0.350983	test-mlogloss:0.504131
[1000]	train-mlogloss:0.349605	test-mlogloss:0.504078
[1010]	train-mlogloss:0.348465	test-mlogloss:0.504065
[1020]	train-mlogloss:0.347195	test-mlogloss:0.50405
[1030]	train-mlogloss:0.34616	test-mlogloss:0.504
[1040]	train-mlogloss:0.344973	test-mlogloss:0.504012
[1050]	train-mlogloss:0.343676	test-mlogloss:0.503997
[1060]	train-mlogloss:0.342548	test-mlogloss:0.503949
[1070]	train-mlogloss:0.341165	test-mlogloss:0.503902
[1080]	train-mlogloss:0.340019	test-mlogloss:0.503825
[1090]	train-mlogloss:0.338769	test-mlogloss:0.503852
[1100]	train-mlogloss:0.337513	test-mlogloss:0.503765
[1110]	train-mlogloss:0.33628	test-mlogloss:0.503702
[1120]	train-mlogloss:0.335216	test-mlogloss:0.503638
[1130]	train-mlogloss:0.333951	test-mlogloss:0.503662
[1140]	train-mlogloss:0.33272	test-mlogloss:0.503663
[1150]	train-mlogloss:0.331435	test-mlogloss:0.50358
[1160]	train-mlogloss:0.330239	test-mlogloss:0.503633
[1170]	train-mlogloss:0.328997	test-mlogloss:0.503613
[1180]	train-mlogloss:0.327721	test-mlogloss:0.50357
[1190]	train-mlogloss:0.326513	test-mlogloss:0.503569
Stopping. Best iteration:
[1148]	train-mlogloss:0.331644	test-mlogloss:0.503548

[0.50354797493140779]
[0]	train-mlogloss:1.08441	test-mlogloss:1.08447
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.963338	test-mlogloss:0.96512
[20]	train-mlogloss:0.872293	test-mlogloss:0.875789
[30]	train-mlogloss:0.801673	test-mlogloss:0.80687
[40]	train-mlogloss:0.746456	test-mlogloss:0.753397
[50]	train-mlogloss:0.701918	test-mlogloss:0.710507
[60]	train-mlogloss:0.666698	test-mlogloss:0.676959
[70]	train-mlogloss:0.637917	test-mlogloss:0.65002
[80]	train-mlogloss:0.614188	test-mlogloss:0.62805
[90]	train-mlogloss:0.59472	test-mlogloss:0.610331
[100]	train-mlogloss:0.578207	test-mlogloss:0.59564
[110]	train-mlogloss:0.564296	test-mlogloss:0.583486
[120]	train-mlogloss:0.552471	test-mlogloss:0.573411
[130]	train-mlogloss:0.542331	test-mlogloss:0.565022
[140]	train-mlogloss:0.533491	test-mlogloss:0.557949
[150]	train-mlogloss:0.525761	test-mlogloss:0.55193
[160]	train-mlogloss:0.518923	test-mlogloss:0.546847
[170]	train-mlogloss:0.512907	test-mlogloss:0.542477
[180]	train-mlogloss:0.507334	test-mlogloss:0.538635
[190]	train-mlogloss:0.502388	test-mlogloss:0.535317
[200]	train-mlogloss:0.497888	test-mlogloss:0.532502
[210]	train-mlogloss:0.493461	test-mlogloss:0.530008
[220]	train-mlogloss:0.489623	test-mlogloss:0.527737
[230]	train-mlogloss:0.485919	test-mlogloss:0.525744
[240]	train-mlogloss:0.482474	test-mlogloss:0.523962
[250]	train-mlogloss:0.479292	test-mlogloss:0.522355
[260]	train-mlogloss:0.476407	test-mlogloss:0.520875
[270]	train-mlogloss:0.473338	test-mlogloss:0.51953
[280]	train-mlogloss:0.470426	test-mlogloss:0.518263
[290]	train-mlogloss:0.467696	test-mlogloss:0.517177
[300]	train-mlogloss:0.465222	test-mlogloss:0.516321
[310]	train-mlogloss:0.462826	test-mlogloss:0.51541
[320]	train-mlogloss:0.460516	test-mlogloss:0.514552
[330]	train-mlogloss:0.457944	test-mlogloss:0.513648
[340]	train-mlogloss:0.455696	test-mlogloss:0.512938
[350]	train-mlogloss:0.453398	test-mlogloss:0.512143
[360]	train-mlogloss:0.451104	test-mlogloss:0.511343
[370]	train-mlogloss:0.448978	test-mlogloss:0.510797
[380]	train-mlogloss:0.446959	test-mlogloss:0.510284
[390]	train-mlogloss:0.444992	test-mlogloss:0.509778
[400]	train-mlogloss:0.443042	test-mlogloss:0.50922
[410]	train-mlogloss:0.440953	test-mlogloss:0.508712
[420]	train-mlogloss:0.438952	test-mlogloss:0.5083
[430]	train-mlogloss:0.437078	test-mlogloss:0.507833
[440]	train-mlogloss:0.435347	test-mlogloss:0.507542
[450]	train-mlogloss:0.433531	test-mlogloss:0.507203
[460]	train-mlogloss:0.431669	test-mlogloss:0.506852
[470]	train-mlogloss:0.429938	test-mlogloss:0.506513
[480]	train-mlogloss:0.428168	test-mlogloss:0.506195
[490]	train-mlogloss:0.426265	test-mlogloss:0.505867
[500]	train-mlogloss:0.42444	test-mlogloss:0.505696
[510]	train-mlogloss:0.422833	test-mlogloss:0.50536
[520]	train-mlogloss:0.42106	test-mlogloss:0.505053
[530]	train-mlogloss:0.419275	test-mlogloss:0.504855
[540]	train-mlogloss:0.417739	test-mlogloss:0.504622
[550]	train-mlogloss:0.415985	test-mlogloss:0.504339
[560]	train-mlogloss:0.41443	test-mlogloss:0.503984
[570]	train-mlogloss:0.41288	test-mlogloss:0.503696
[580]	train-mlogloss:0.411124	test-mlogloss:0.50348
[590]	train-mlogloss:0.409498	test-mlogloss:0.503242
[600]	train-mlogloss:0.407858	test-mlogloss:0.503054
[610]	train-mlogloss:0.406342	test-mlogloss:0.5029
[620]	train-mlogloss:0.404835	test-mlogloss:0.502742
[630]	train-mlogloss:0.403322	test-mlogloss:0.502587
[640]	train-mlogloss:0.401903	test-mlogloss:0.502477
[650]	train-mlogloss:0.400476	test-mlogloss:0.502355
[660]	train-mlogloss:0.39893	test-mlogloss:0.502176
[670]	train-mlogloss:0.397316	test-mlogloss:0.502036
[680]	train-mlogloss:0.395909	test-mlogloss:0.501942
[690]	train-mlogloss:0.394373	test-mlogloss:0.501752
[700]	train-mlogloss:0.392947	test-mlogloss:0.501647
[710]	train-mlogloss:0.391624	test-mlogloss:0.501503
[720]	train-mlogloss:0.390085	test-mlogloss:0.501304
[730]	train-mlogloss:0.388485	test-mlogloss:0.501104
[740]	train-mlogloss:0.387125	test-mlogloss:0.501058
[750]	train-mlogloss:0.385653	test-mlogloss:0.500946
[760]	train-mlogloss:0.38423	test-mlogloss:0.500878
[770]	train-mlogloss:0.382705	test-mlogloss:0.500801
[780]	train-mlogloss:0.381314	test-mlogloss:0.500695
[790]	train-mlogloss:0.379896	test-mlogloss:0.500481
[800]	train-mlogloss:0.378673	test-mlogloss:0.500342
[810]	train-mlogloss:0.377267	test-mlogloss:0.500304
[820]	train-mlogloss:0.375808	test-mlogloss:0.500208
[830]	train-mlogloss:0.37454	test-mlogloss:0.500205
[840]	train-mlogloss:0.373047	test-mlogloss:0.500074
[850]	train-mlogloss:0.371705	test-mlogloss:0.499973
[860]	train-mlogloss:0.370419	test-mlogloss:0.499936
[870]	train-mlogloss:0.369036	test-mlogloss:0.499895
[880]	train-mlogloss:0.3678	test-mlogloss:0.499854
[890]	train-mlogloss:0.3665	test-mlogloss:0.499792
[900]	train-mlogloss:0.365206	test-mlogloss:0.499683
[910]	train-mlogloss:0.363882	test-mlogloss:0.499602
[920]	train-mlogloss:0.362477	test-mlogloss:0.499523
[930]	train-mlogloss:0.361074	test-mlogloss:0.499432
[940]	train-mlogloss:0.359805	test-mlogloss:0.499419
[950]	train-mlogloss:0.358464	test-mlogloss:0.499283
[960]	train-mlogloss:0.357269	test-mlogloss:0.499233
[970]	train-mlogloss:0.355916	test-mlogloss:0.499079
[980]	train-mlogloss:0.35467	test-mlogloss:0.499032
[990]	train-mlogloss:0.353303	test-mlogloss:0.49894
[1000]	train-mlogloss:0.351995	test-mlogloss:0.498896
[1010]	train-mlogloss:0.350829	test-mlogloss:0.498867
[1020]	train-mlogloss:0.349608	test-mlogloss:0.498833
[1030]	train-mlogloss:0.34845	test-mlogloss:0.498787
[1040]	train-mlogloss:0.34714	test-mlogloss:0.498707
[1050]	train-mlogloss:0.345986	test-mlogloss:0.498684
[1060]	train-mlogloss:0.344971	test-mlogloss:0.498691
[1070]	train-mlogloss:0.343566	test-mlogloss:0.498622
[1080]	train-mlogloss:0.342284	test-mlogloss:0.498547
[1090]	train-mlogloss:0.340959	test-mlogloss:0.49846
[1100]	train-mlogloss:0.339795	test-mlogloss:0.498383
[1110]	train-mlogloss:0.33853	test-mlogloss:0.498366
[1120]	train-mlogloss:0.337245	test-mlogloss:0.498369
[1130]	train-mlogloss:0.33597	test-mlogloss:0.4983
[1140]	train-mlogloss:0.334827	test-mlogloss:0.498267
[1150]	train-mlogloss:0.333687	test-mlogloss:0.498134
[1160]	train-mlogloss:0.332545	test-mlogloss:0.498063
[1170]	train-mlogloss:0.33133	test-mlogloss:0.497993
[1180]	train-mlogloss:0.330098	test-mlogloss:0.497883
[1190]	train-mlogloss:0.328995	test-mlogloss:0.49781
[1200]	train-mlogloss:0.327856	test-mlogloss:0.497813
[1210]	train-mlogloss:0.326682	test-mlogloss:0.497791
[1220]	train-mlogloss:0.325566	test-mlogloss:0.497639
[1230]	train-mlogloss:0.324294	test-mlogloss:0.497647
[1240]	train-mlogloss:0.323246	test-mlogloss:0.49762
[1250]	train-mlogloss:0.322107	test-mlogloss:0.497593
[1260]	train-mlogloss:0.320932	test-mlogloss:0.497562
[1270]	train-mlogloss:0.319832	test-mlogloss:0.497478
[1280]	train-mlogloss:0.318678	test-mlogloss:0.497483
[1290]	train-mlogloss:0.317526	test-mlogloss:0.497437
[1300]	train-mlogloss:0.316343	test-mlogloss:0.497394
[1310]	train-mlogloss:0.315194	test-mlogloss:0.497328
[1320]	train-mlogloss:0.314014	test-mlogloss:0.497331
[1330]	train-mlogloss:0.313055	test-mlogloss:0.497315
[1340]	train-mlogloss:0.312034	test-mlogloss:0.497276
[1350]	train-mlogloss:0.310972	test-mlogloss:0.497395
[1360]	train-mlogloss:0.309897	test-mlogloss:0.497488
[1370]	train-mlogloss:0.308827	test-mlogloss:0.497463
[1380]	train-mlogloss:0.30767	test-mlogloss:0.497458
[1390]	train-mlogloss:0.306563	test-mlogloss:0.497535
Stopping. Best iteration:
[1340]	train-mlogloss:0.312034	test-mlogloss:0.497276

[0.50354797493140779, 0.49727635213071869]
[0]	train-mlogloss:1.08426	test-mlogloss:1.08473
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.962354	test-mlogloss:0.966784
[20]	train-mlogloss:0.870781	test-mlogloss:0.87868
[30]	train-mlogloss:0.799577	test-mlogloss:0.810948
[40]	train-mlogloss:0.744205	test-mlogloss:0.758608
[50]	train-mlogloss:0.699562	test-mlogloss:0.716695
[60]	train-mlogloss:0.664085	test-mlogloss:0.683849
[70]	train-mlogloss:0.635242	test-mlogloss:0.657538
[80]	train-mlogloss:0.611469	test-mlogloss:0.636129
[90]	train-mlogloss:0.591668	test-mlogloss:0.618659
[100]	train-mlogloss:0.575058	test-mlogloss:0.60424
[110]	train-mlogloss:0.561069	test-mlogloss:0.592427
[120]	train-mlogloss:0.549162	test-mlogloss:0.582647
[130]	train-mlogloss:0.538953	test-mlogloss:0.574408
[140]	train-mlogloss:0.530174	test-mlogloss:0.567637
[150]	train-mlogloss:0.522513	test-mlogloss:0.561946
[160]	train-mlogloss:0.51571	test-mlogloss:0.557206
[170]	train-mlogloss:0.509541	test-mlogloss:0.552922
[180]	train-mlogloss:0.504099	test-mlogloss:0.549406
[190]	train-mlogloss:0.499039	test-mlogloss:0.546253
[200]	train-mlogloss:0.494542	test-mlogloss:0.543474
[210]	train-mlogloss:0.490419	test-mlogloss:0.541056
[220]	train-mlogloss:0.486534	test-mlogloss:0.538834
[230]	train-mlogloss:0.482746	test-mlogloss:0.536719
[240]	train-mlogloss:0.479371	test-mlogloss:0.534935
[250]	train-mlogloss:0.476163	test-mlogloss:0.533408
[260]	train-mlogloss:0.473017	test-mlogloss:0.53202
[270]	train-mlogloss:0.470215	test-mlogloss:0.530825
[280]	train-mlogloss:0.467495	test-mlogloss:0.529702
[290]	train-mlogloss:0.464672	test-mlogloss:0.528519
[300]	train-mlogloss:0.461912	test-mlogloss:0.527592
[310]	train-mlogloss:0.459434	test-mlogloss:0.52673
[320]	train-mlogloss:0.456968	test-mlogloss:0.525802
[330]	train-mlogloss:0.454311	test-mlogloss:0.524903
[340]	train-mlogloss:0.452007	test-mlogloss:0.52426
[350]	train-mlogloss:0.449917	test-mlogloss:0.523643
[360]	train-mlogloss:0.447734	test-mlogloss:0.523065
[370]	train-mlogloss:0.445301	test-mlogloss:0.522406
[380]	train-mlogloss:0.443257	test-mlogloss:0.521839
[390]	train-mlogloss:0.441171	test-mlogloss:0.521375
[400]	train-mlogloss:0.439199	test-mlogloss:0.520917
[410]	train-mlogloss:0.437249	test-mlogloss:0.52042
[420]	train-mlogloss:0.435247	test-mlogloss:0.519972
[430]	train-mlogloss:0.433564	test-mlogloss:0.519614
[440]	train-mlogloss:0.431711	test-mlogloss:0.519282
[450]	train-mlogloss:0.429893	test-mlogloss:0.51893
[460]	train-mlogloss:0.427968	test-mlogloss:0.518588
[470]	train-mlogloss:0.426423	test-mlogloss:0.518248
[480]	train-mlogloss:0.424701	test-mlogloss:0.51798
[490]	train-mlogloss:0.422972	test-mlogloss:0.517625
[500]	train-mlogloss:0.421223	test-mlogloss:0.517306
[510]	train-mlogloss:0.41941	test-mlogloss:0.517073
[520]	train-mlogloss:0.417667	test-mlogloss:0.516773
[530]	train-mlogloss:0.416166	test-mlogloss:0.516626
[540]	train-mlogloss:0.41441	test-mlogloss:0.516381
[550]	train-mlogloss:0.412924	test-mlogloss:0.516156
[560]	train-mlogloss:0.411245	test-mlogloss:0.515916
[570]	train-mlogloss:0.409568	test-mlogloss:0.515626
[580]	train-mlogloss:0.408187	test-mlogloss:0.51539
[590]	train-mlogloss:0.406699	test-mlogloss:0.515215
[600]	train-mlogloss:0.404991	test-mlogloss:0.515124
[610]	train-mlogloss:0.403465	test-mlogloss:0.514865
[620]	train-mlogloss:0.40177	test-mlogloss:0.514694
[630]	train-mlogloss:0.400031	test-mlogloss:0.51442
[640]	train-mlogloss:0.398467	test-mlogloss:0.514233
[650]	train-mlogloss:0.397062	test-mlogloss:0.514072
[660]	train-mlogloss:0.395534	test-mlogloss:0.513916
[670]	train-mlogloss:0.393961	test-mlogloss:0.513689
[680]	train-mlogloss:0.392397	test-mlogloss:0.513418
[690]	train-mlogloss:0.390923	test-mlogloss:0.513178
[700]	train-mlogloss:0.389441	test-mlogloss:0.513011
[710]	train-mlogloss:0.387945	test-mlogloss:0.512965
[720]	train-mlogloss:0.386502	test-mlogloss:0.512822
[730]	train-mlogloss:0.385117	test-mlogloss:0.512737
[740]	train-mlogloss:0.383892	test-mlogloss:0.512516
[750]	train-mlogloss:0.382671	test-mlogloss:0.512389
[760]	train-mlogloss:0.381222	test-mlogloss:0.512272
[770]	train-mlogloss:0.379735	test-mlogloss:0.512119
[780]	train-mlogloss:0.378362	test-mlogloss:0.512072
[790]	train-mlogloss:0.376936	test-mlogloss:0.512047
[800]	train-mlogloss:0.375588	test-mlogloss:0.511921
[810]	train-mlogloss:0.374278	test-mlogloss:0.511752
[820]	train-mlogloss:0.372873	test-mlogloss:0.511569
[830]	train-mlogloss:0.371578	test-mlogloss:0.511432
[840]	train-mlogloss:0.370315	test-mlogloss:0.511311
[850]	train-mlogloss:0.369046	test-mlogloss:0.511137
[860]	train-mlogloss:0.36767	test-mlogloss:0.511073
[870]	train-mlogloss:0.366365	test-mlogloss:0.511045
[880]	train-mlogloss:0.364996	test-mlogloss:0.510975
[890]	train-mlogloss:0.363611	test-mlogloss:0.51087
[900]	train-mlogloss:0.362221	test-mlogloss:0.51079
[910]	train-mlogloss:0.360919	test-mlogloss:0.510684
[920]	train-mlogloss:0.359511	test-mlogloss:0.510571
[930]	train-mlogloss:0.35835	test-mlogloss:0.510513
[940]	train-mlogloss:0.357178	test-mlogloss:0.510547
[950]	train-mlogloss:0.355781	test-mlogloss:0.510484
[960]	train-mlogloss:0.354429	test-mlogloss:0.510396
[970]	train-mlogloss:0.353049	test-mlogloss:0.510353
[980]	train-mlogloss:0.351611	test-mlogloss:0.510288
[990]	train-mlogloss:0.350424	test-mlogloss:0.510237
[1000]	train-mlogloss:0.349147	test-mlogloss:0.510164
[1010]	train-mlogloss:0.347782	test-mlogloss:0.510067
[1020]	train-mlogloss:0.346452	test-mlogloss:0.510097
[1030]	train-mlogloss:0.345001	test-mlogloss:0.509948
[1040]	train-mlogloss:0.343686	test-mlogloss:0.509896
[1050]	train-mlogloss:0.342457	test-mlogloss:0.509862
[1060]	train-mlogloss:0.34129	test-mlogloss:0.509761
[1070]	train-mlogloss:0.340106	test-mlogloss:0.509657
[1080]	train-mlogloss:0.33887	test-mlogloss:0.509685
[1090]	train-mlogloss:0.337587	test-mlogloss:0.509662
[1100]	train-mlogloss:0.336529	test-mlogloss:0.5097
[1110]	train-mlogloss:0.335251	test-mlogloss:0.509607
[1120]	train-mlogloss:0.334053	test-mlogloss:0.50952
[1130]	train-mlogloss:0.332875	test-mlogloss:0.509482
[1140]	train-mlogloss:0.331785	test-mlogloss:0.509379
[1150]	train-mlogloss:0.33052	test-mlogloss:0.509339
[1160]	train-mlogloss:0.329345	test-mlogloss:0.509313
[1170]	train-mlogloss:0.3281	test-mlogloss:0.509281
[1180]	train-mlogloss:0.326889	test-mlogloss:0.509157
[1190]	train-mlogloss:0.325753	test-mlogloss:0.509109
[1200]	train-mlogloss:0.32447	test-mlogloss:0.509071
[1210]	train-mlogloss:0.323233	test-mlogloss:0.509133
[1220]	train-mlogloss:0.322028	test-mlogloss:0.509153
[1230]	train-mlogloss:0.320999	test-mlogloss:0.509164
[1240]	train-mlogloss:0.319809	test-mlogloss:0.509093
Stopping. Best iteration:
[1198]	train-mlogloss:0.324769	test-mlogloss:0.509041

[0.50354797493140779, 0.49727635213071869, 0.50904081005502133]
[0]	train-mlogloss:1.0843	test-mlogloss:1.0847
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.96257	test-mlogloss:0.966603
[20]	train-mlogloss:0.871066	test-mlogloss:0.878328
[30]	train-mlogloss:0.800099	test-mlogloss:0.810332
[40]	train-mlogloss:0.744821	test-mlogloss:0.757618
[50]	train-mlogloss:0.700201	test-mlogloss:0.715542
[60]	train-mlogloss:0.664688	test-mlogloss:0.682406
[70]	train-mlogloss:0.635823	test-mlogloss:0.655865
[80]	train-mlogloss:0.611975	test-mlogloss:0.634174
[90]	train-mlogloss:0.592279	test-mlogloss:0.616644
[100]	train-mlogloss:0.575817	test-mlogloss:0.602275
[110]	train-mlogloss:0.561844	test-mlogloss:0.590401
[120]	train-mlogloss:0.54992	test-mlogloss:0.580621
[130]	train-mlogloss:0.539747	test-mlogloss:0.572348
[140]	train-mlogloss:0.530845	test-mlogloss:0.565423
[150]	train-mlogloss:0.523199	test-mlogloss:0.559518
[160]	train-mlogloss:0.516331	test-mlogloss:0.5544
[170]	train-mlogloss:0.510257	test-mlogloss:0.550114
[180]	train-mlogloss:0.504637	test-mlogloss:0.546228
[190]	train-mlogloss:0.499619	test-mlogloss:0.542805
[200]	train-mlogloss:0.495018	test-mlogloss:0.539955
[210]	train-mlogloss:0.490889	test-mlogloss:0.537424
[220]	train-mlogloss:0.48685	test-mlogloss:0.535091
[230]	train-mlogloss:0.483195	test-mlogloss:0.5331
[240]	train-mlogloss:0.479688	test-mlogloss:0.531309
[250]	train-mlogloss:0.476225	test-mlogloss:0.529677
[260]	train-mlogloss:0.473105	test-mlogloss:0.528236
[270]	train-mlogloss:0.470114	test-mlogloss:0.526924
[280]	train-mlogloss:0.467271	test-mlogloss:0.525814
[290]	train-mlogloss:0.464591	test-mlogloss:0.524661
[300]	train-mlogloss:0.462065	test-mlogloss:0.523795
[310]	train-mlogloss:0.459373	test-mlogloss:0.522848
[320]	train-mlogloss:0.456814	test-mlogloss:0.521983
[330]	train-mlogloss:0.454387	test-mlogloss:0.521174
[340]	train-mlogloss:0.452069	test-mlogloss:0.52035
[350]	train-mlogloss:0.449827	test-mlogloss:0.519663
[360]	train-mlogloss:0.447658	test-mlogloss:0.518948
[370]	train-mlogloss:0.4456	test-mlogloss:0.518367
[380]	train-mlogloss:0.443258	test-mlogloss:0.517796
[390]	train-mlogloss:0.441213	test-mlogloss:0.51728
[400]	train-mlogloss:0.439221	test-mlogloss:0.516685
[410]	train-mlogloss:0.43726	test-mlogloss:0.516183
[420]	train-mlogloss:0.435591	test-mlogloss:0.515699
[430]	train-mlogloss:0.433517	test-mlogloss:0.515236
[440]	train-mlogloss:0.431896	test-mlogloss:0.514953
[450]	train-mlogloss:0.43002	test-mlogloss:0.514533
[460]	train-mlogloss:0.428287	test-mlogloss:0.514197
[470]	train-mlogloss:0.426531	test-mlogloss:0.513803
[480]	train-mlogloss:0.424743	test-mlogloss:0.513481
[490]	train-mlogloss:0.422938	test-mlogloss:0.513322
[500]	train-mlogloss:0.421196	test-mlogloss:0.513009
[510]	train-mlogloss:0.419446	test-mlogloss:0.512763
[520]	train-mlogloss:0.418057	test-mlogloss:0.512542
[530]	train-mlogloss:0.416336	test-mlogloss:0.512287
[540]	train-mlogloss:0.414579	test-mlogloss:0.512071
[550]	train-mlogloss:0.41291	test-mlogloss:0.511819
[560]	train-mlogloss:0.411506	test-mlogloss:0.511657
[570]	train-mlogloss:0.410062	test-mlogloss:0.511449
[580]	train-mlogloss:0.408554	test-mlogloss:0.511278
[590]	train-mlogloss:0.406948	test-mlogloss:0.511126
[600]	train-mlogloss:0.405218	test-mlogloss:0.511036
[610]	train-mlogloss:0.403522	test-mlogloss:0.510878
[620]	train-mlogloss:0.40191	test-mlogloss:0.510636
[630]	train-mlogloss:0.400306	test-mlogloss:0.510392
[640]	train-mlogloss:0.398943	test-mlogloss:0.510222
[650]	train-mlogloss:0.397394	test-mlogloss:0.510102
[660]	train-mlogloss:0.395815	test-mlogloss:0.509959
[670]	train-mlogloss:0.394188	test-mlogloss:0.509811
[680]	train-mlogloss:0.392644	test-mlogloss:0.509661
[690]	train-mlogloss:0.391195	test-mlogloss:0.509526
[700]	train-mlogloss:0.389678	test-mlogloss:0.509392
[710]	train-mlogloss:0.388326	test-mlogloss:0.509219
[720]	train-mlogloss:0.386889	test-mlogloss:0.509137
[730]	train-mlogloss:0.385465	test-mlogloss:0.50896
[740]	train-mlogloss:0.383957	test-mlogloss:0.508974
[750]	train-mlogloss:0.382314	test-mlogloss:0.50882
[760]	train-mlogloss:0.380806	test-mlogloss:0.508721
[770]	train-mlogloss:0.379301	test-mlogloss:0.508567
[780]	train-mlogloss:0.377876	test-mlogloss:0.508469
[790]	train-mlogloss:0.376585	test-mlogloss:0.508399
[800]	train-mlogloss:0.375081	test-mlogloss:0.508344
[810]	train-mlogloss:0.373704	test-mlogloss:0.508262
[820]	train-mlogloss:0.372186	test-mlogloss:0.50814
[830]	train-mlogloss:0.370851	test-mlogloss:0.508021
[840]	train-mlogloss:0.369565	test-mlogloss:0.507855
[850]	train-mlogloss:0.368268	test-mlogloss:0.507857
[860]	train-mlogloss:0.366846	test-mlogloss:0.507776
[870]	train-mlogloss:0.365489	test-mlogloss:0.507637
[880]	train-mlogloss:0.363997	test-mlogloss:0.507527
[890]	train-mlogloss:0.362688	test-mlogloss:0.507513
[900]	train-mlogloss:0.361354	test-mlogloss:0.507454
[910]	train-mlogloss:0.359985	test-mlogloss:0.507398
[920]	train-mlogloss:0.358702	test-mlogloss:0.507347
[930]	train-mlogloss:0.357415	test-mlogloss:0.507253
[940]	train-mlogloss:0.356102	test-mlogloss:0.50712
[950]	train-mlogloss:0.354722	test-mlogloss:0.507072
[960]	train-mlogloss:0.353535	test-mlogloss:0.507031
[970]	train-mlogloss:0.352123	test-mlogloss:0.507001
[980]	train-mlogloss:0.350841	test-mlogloss:0.506907
[990]	train-mlogloss:0.349527	test-mlogloss:0.50684
[1000]	train-mlogloss:0.348065	test-mlogloss:0.506725
[1010]	train-mlogloss:0.346883	test-mlogloss:0.506746
[1020]	train-mlogloss:0.345745	test-mlogloss:0.506719
[1030]	train-mlogloss:0.344487	test-mlogloss:0.506677
[1040]	train-mlogloss:0.343146	test-mlogloss:0.506574
[1050]	train-mlogloss:0.341875	test-mlogloss:0.506514
[1060]	train-mlogloss:0.340505	test-mlogloss:0.506429
[1070]	train-mlogloss:0.339232	test-mlogloss:0.50633
[1080]	train-mlogloss:0.337991	test-mlogloss:0.506265
[1090]	train-mlogloss:0.336845	test-mlogloss:0.506233
[1100]	train-mlogloss:0.335681	test-mlogloss:0.506185
[1110]	train-mlogloss:0.334446	test-mlogloss:0.506179
[1120]	train-mlogloss:0.333324	test-mlogloss:0.506201
[1130]	train-mlogloss:0.332089	test-mlogloss:0.506163
[1140]	train-mlogloss:0.330955	test-mlogloss:0.506104
[1150]	train-mlogloss:0.329747	test-mlogloss:0.506074
[1160]	train-mlogloss:0.328571	test-mlogloss:0.505996
[1170]	train-mlogloss:0.327361	test-mlogloss:0.506024
[1180]	train-mlogloss:0.326179	test-mlogloss:0.505952
[1190]	train-mlogloss:0.325048	test-mlogloss:0.505891
[1200]	train-mlogloss:0.323968	test-mlogloss:0.505875
[1210]	train-mlogloss:0.322867	test-mlogloss:0.505808
[1220]	train-mlogloss:0.321694	test-mlogloss:0.505867
[1230]	train-mlogloss:0.320454	test-mlogloss:0.505893
[1240]	train-mlogloss:0.319324	test-mlogloss:0.505865
[1250]	train-mlogloss:0.318108	test-mlogloss:0.505806
[1260]	train-mlogloss:0.316875	test-mlogloss:0.505727
[1270]	train-mlogloss:0.315737	test-mlogloss:0.505709
[1280]	train-mlogloss:0.314662	test-mlogloss:0.505695
[1290]	train-mlogloss:0.313601	test-mlogloss:0.50568
[1300]	train-mlogloss:0.312365	test-mlogloss:0.505622
[1310]	train-mlogloss:0.311312	test-mlogloss:0.505735
[1320]	train-mlogloss:0.310273	test-mlogloss:0.505719
[1330]	train-mlogloss:0.309157	test-mlogloss:0.505707
[1340]	train-mlogloss:0.30811	test-mlogloss:0.505725
[1350]	train-mlogloss:0.307024	test-mlogloss:0.505628
Stopping. Best iteration:
[1300]	train-mlogloss:0.312365	test-mlogloss:0.505622

[0.50354797493140779, 0.49727635213071869, 0.50904081005502133, 0.50562155057920022]
[0]	train-mlogloss:1.08437	test-mlogloss:1.08469
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.961517	test-mlogloss:0.964832
[20]	train-mlogloss:0.869499	test-mlogloss:0.875438
[30]	train-mlogloss:0.798566	test-mlogloss:0.806999
[40]	train-mlogloss:0.743055	test-mlogloss:0.753922
[50]	train-mlogloss:0.699129	test-mlogloss:0.712352
[60]	train-mlogloss:0.663576	test-mlogloss:0.679108
[70]	train-mlogloss:0.634785	test-mlogloss:0.652494
[80]	train-mlogloss:0.611254	test-mlogloss:0.631034
[90]	train-mlogloss:0.591908	test-mlogloss:0.613743
[100]	train-mlogloss:0.575665	test-mlogloss:0.59951
[110]	train-mlogloss:0.562081	test-mlogloss:0.587843
[120]	train-mlogloss:0.550551	test-mlogloss:0.578276
[130]	train-mlogloss:0.540389	test-mlogloss:0.570079
[140]	train-mlogloss:0.531721	test-mlogloss:0.563221
[150]	train-mlogloss:0.523981	test-mlogloss:0.557387
[160]	train-mlogloss:0.517068	test-mlogloss:0.552458
[170]	train-mlogloss:0.510985	test-mlogloss:0.548231
[180]	train-mlogloss:0.505312	test-mlogloss:0.544558
[190]	train-mlogloss:0.500503	test-mlogloss:0.541199
[200]	train-mlogloss:0.495856	test-mlogloss:0.538408
[210]	train-mlogloss:0.491453	test-mlogloss:0.535809
[220]	train-mlogloss:0.487475	test-mlogloss:0.533604
[230]	train-mlogloss:0.483761	test-mlogloss:0.531505
[240]	train-mlogloss:0.480191	test-mlogloss:0.529776
[250]	train-mlogloss:0.476755	test-mlogloss:0.52803
[260]	train-mlogloss:0.473638	test-mlogloss:0.526556
[270]	train-mlogloss:0.470523	test-mlogloss:0.52521
[280]	train-mlogloss:0.467623	test-mlogloss:0.523973
[290]	train-mlogloss:0.464854	test-mlogloss:0.522805
[300]	train-mlogloss:0.462263	test-mlogloss:0.521746
[310]	train-mlogloss:0.459581	test-mlogloss:0.520755
[320]	train-mlogloss:0.457196	test-mlogloss:0.519821
[330]	train-mlogloss:0.454562	test-mlogloss:0.518978
[340]	train-mlogloss:0.452289	test-mlogloss:0.518345
[350]	train-mlogloss:0.450212	test-mlogloss:0.517635
[360]	train-mlogloss:0.447927	test-mlogloss:0.516965
[370]	train-mlogloss:0.445661	test-mlogloss:0.516356
[380]	train-mlogloss:0.443638	test-mlogloss:0.515821
[390]	train-mlogloss:0.441685	test-mlogloss:0.515256
[400]	train-mlogloss:0.439611	test-mlogloss:0.514698
[410]	train-mlogloss:0.437697	test-mlogloss:0.514221
[420]	train-mlogloss:0.435804	test-mlogloss:0.513727
[430]	train-mlogloss:0.433926	test-mlogloss:0.513322
[440]	train-mlogloss:0.431995	test-mlogloss:0.512914
[450]	train-mlogloss:0.430253	test-mlogloss:0.512556
[460]	train-mlogloss:0.42848	test-mlogloss:0.512116
[470]	train-mlogloss:0.426735	test-mlogloss:0.511707
[480]	train-mlogloss:0.424914	test-mlogloss:0.511423
[490]	train-mlogloss:0.423261	test-mlogloss:0.511043
[500]	train-mlogloss:0.42161	test-mlogloss:0.510801
[510]	train-mlogloss:0.419805	test-mlogloss:0.510524
[520]	train-mlogloss:0.418161	test-mlogloss:0.510257
[530]	train-mlogloss:0.416549	test-mlogloss:0.510007
[540]	train-mlogloss:0.414969	test-mlogloss:0.509764
[550]	train-mlogloss:0.413303	test-mlogloss:0.509539
[560]	train-mlogloss:0.41152	test-mlogloss:0.50934
[570]	train-mlogloss:0.41007	test-mlogloss:0.509105
[580]	train-mlogloss:0.408359	test-mlogloss:0.508863
[590]	train-mlogloss:0.406938	test-mlogloss:0.508725
[600]	train-mlogloss:0.405358	test-mlogloss:0.508521
[610]	train-mlogloss:0.403832	test-mlogloss:0.508249
[620]	train-mlogloss:0.402341	test-mlogloss:0.508104
[630]	train-mlogloss:0.400572	test-mlogloss:0.507892
[640]	train-mlogloss:0.399136	test-mlogloss:0.507744
[650]	train-mlogloss:0.397506	test-mlogloss:0.507566
[660]	train-mlogloss:0.396105	test-mlogloss:0.50735
[670]	train-mlogloss:0.394499	test-mlogloss:0.50718
[680]	train-mlogloss:0.392887	test-mlogloss:0.506973
[690]	train-mlogloss:0.391279	test-mlogloss:0.506815
[700]	train-mlogloss:0.389798	test-mlogloss:0.506686
[710]	train-mlogloss:0.388279	test-mlogloss:0.506577
[720]	train-mlogloss:0.386946	test-mlogloss:0.506397
[730]	train-mlogloss:0.385378	test-mlogloss:0.506309
[740]	train-mlogloss:0.384035	test-mlogloss:0.506183
[750]	train-mlogloss:0.382516	test-mlogloss:0.506095
[760]	train-mlogloss:0.38108	test-mlogloss:0.505983
[770]	train-mlogloss:0.37966	test-mlogloss:0.505891
[780]	train-mlogloss:0.378238	test-mlogloss:0.505788
[790]	train-mlogloss:0.3769	test-mlogloss:0.505683
[800]	train-mlogloss:0.375646	test-mlogloss:0.505608
[810]	train-mlogloss:0.374144	test-mlogloss:0.505501
[820]	train-mlogloss:0.372702	test-mlogloss:0.505424
[830]	train-mlogloss:0.371296	test-mlogloss:0.505411
[840]	train-mlogloss:0.369804	test-mlogloss:0.505351
[850]	train-mlogloss:0.368521	test-mlogloss:0.505282
[860]	train-mlogloss:0.367096	test-mlogloss:0.505201
[870]	train-mlogloss:0.365739	test-mlogloss:0.505056
[880]	train-mlogloss:0.364203	test-mlogloss:0.504942
[890]	train-mlogloss:0.36287	test-mlogloss:0.504868
[900]	train-mlogloss:0.361514	test-mlogloss:0.504776
[910]	train-mlogloss:0.3601	test-mlogloss:0.504664
[920]	train-mlogloss:0.35877	test-mlogloss:0.504659
[930]	train-mlogloss:0.3574	test-mlogloss:0.504576
[940]	train-mlogloss:0.356138	test-mlogloss:0.504547
[950]	train-mlogloss:0.354866	test-mlogloss:0.504486
[960]	train-mlogloss:0.353533	test-mlogloss:0.504421
[970]	train-mlogloss:0.352283	test-mlogloss:0.504366
[980]	train-mlogloss:0.350951	test-mlogloss:0.504301
[990]	train-mlogloss:0.349846	test-mlogloss:0.5042
[1000]	train-mlogloss:0.348739	test-mlogloss:0.504165
[1010]	train-mlogloss:0.347497	test-mlogloss:0.50409
[1020]	train-mlogloss:0.346223	test-mlogloss:0.50396
[1030]	train-mlogloss:0.34486	test-mlogloss:0.503909
[1040]	train-mlogloss:0.343493	test-mlogloss:0.503808
[1050]	train-mlogloss:0.342261	test-mlogloss:0.503758
[1060]	train-mlogloss:0.341042	test-mlogloss:0.503685
[1070]	train-mlogloss:0.339874	test-mlogloss:0.503614
[1080]	train-mlogloss:0.338726	test-mlogloss:0.503542
[1090]	train-mlogloss:0.337481	test-mlogloss:0.503559
[1100]	train-mlogloss:0.336182	test-mlogloss:0.503523
[1110]	train-mlogloss:0.335128	test-mlogloss:0.503474
[1120]	train-mlogloss:0.333994	test-mlogloss:0.50349
[1130]	train-mlogloss:0.332911	test-mlogloss:0.503507
[1140]	train-mlogloss:0.331737	test-mlogloss:0.503426
[1150]	train-mlogloss:0.330541	test-mlogloss:0.503429
[1160]	train-mlogloss:0.329218	test-mlogloss:0.503446
[1170]	train-mlogloss:0.328009	test-mlogloss:0.50342
[1180]	train-mlogloss:0.32684	test-mlogloss:0.503287
[1190]	train-mlogloss:0.3257	test-mlogloss:0.503249
[1200]	train-mlogloss:0.324627	test-mlogloss:0.50324
[1210]	train-mlogloss:0.323567	test-mlogloss:0.50319
[1220]	train-mlogloss:0.322341	test-mlogloss:0.503154
[1230]	train-mlogloss:0.32121	test-mlogloss:0.503181
[1240]	train-mlogloss:0.319958	test-mlogloss:0.503165
[1250]	train-mlogloss:0.318721	test-mlogloss:0.503188
[1260]	train-mlogloss:0.317462	test-mlogloss:0.503167
[1270]	train-mlogloss:0.316274	test-mlogloss:0.503114
[1280]	train-mlogloss:0.315078	test-mlogloss:0.503138
[1290]	train-mlogloss:0.313979	test-mlogloss:0.503143
[1300]	train-mlogloss:0.312816	test-mlogloss:0.503163
[1310]	train-mlogloss:0.31169	test-mlogloss:0.50316
[1320]	train-mlogloss:0.310757	test-mlogloss:0.503137
Stopping. Best iteration:
[1273]	train-mlogloss:0.315966	test-mlogloss:0.503096

[0.50354797493140779, 0.49727635213071869, 0.50904081005502133, 0.50562155057920022, 0.50309564042943167]
0.503716503364

In [19]:
dfs3 = run3_to_stackdf(rv3)
pickle.dump(dfs3, open('modeloutput-xgb-clf-r3.pkl', 'wb'))

In [9]:
def run_to_stackdf(run):
    """Single-column ('level') variant of run3_to_stackdf, for the regression model."""
    df_testpreds = pd.DataFrame(run[2].mean(axis=0))
    df_testpreds.columns = ['level']
    df_testpreds['listing_id'] = cv_test[0].listing_id
    df_allpreds = pd.concat([run[1][['level', 'listing_id']], df_testpreds])

    df_allpreds.sort_values('listing_id', inplace=True)
    df_allpreds.set_index('listing_id', inplace=True)

    df_fold = []
    for f in range(run[2].shape[0]):
        df_fold.append(pd.DataFrame(run[2][f]))
        df_fold[-1]['listing_id'] = test_df.listing_id
        df_fold[-1].sort_values('listing_id', inplace=True)
        df_fold[-1].set_index('listing_id', inplace=True)

    return (df_allpreds, df_fold)

In [10]:
def runXGB1(train_X, train_y, test_X, test_y=None, feature_names=None, seed_val=0, num_rounds=4000):
    param = {}
    param['objective'] = 'reg:logistic'
    #param['tree_method'] = 'hist'
    param['eta'] = 0.02
    param['max_depth'] = 6
    param['silent'] = 1
    param['num_class'] = 1
    param['eval_metric'] = "rmse"
    param['min_child_weight'] = 1
    param['subsample'] = 0.7
    param['colsample_bytree'] = 0.7
    param['seed'] = seed_val
    param['base_score'] = train_y.mean()

    plst = list(param.items())
    xgtrain = xgb.DMatrix(train_X, label=train_y)

    if test_y is not None:
        xgtest = xgb.DMatrix(test_X, label=test_y)
        watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
        model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=50, verbose_eval=10)
    else:
        xgtest = xgb.DMatrix(test_X)
        model = xgb.train(plst, xgtrain, num_rounds)

    # best_ntree_limit only exists when early stopping fired; fall back to all trees
    pred_test_y = model.predict(xgtest, ntree_limit=getattr(model, 'best_ntree_limit', 0))
    return pred_test_y, model

In [11]:
medium_regression_tgt = (.5 + (9/13)) / 2
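# This recasts the 3-class problem as a single regression on an ordinal 0..1
# scale: low -> 0, high -> 1, medium -> (0.5 + 9/13) / 2 ~ 0.596 (see run_cv1 below).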

def run_cv1(train_df, cv_test, kf, features_to_use):
    
    train_X = train_df[features_to_use] #sparse.hstack([train_df[features_to_use], tr_sparse]).tocsr()
    train_y3 = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))
    
    train_y = np.zeros_like(train_y3, dtype=np.float32)
    train_y[train_y3 == 1] = medium_regression_tgt
    train_y[train_y3 == 2] = 1

    cv_preds = []
    cv_scores = []
    models = []
    test_preds = []
    
    fold = 0

    for dev_index, val_index in kf.split(range(train_X.shape[0]), train_y):

        dev_X, val_X = train_X.iloc[dev_index], train_X.iloc[val_index]
        dev_y, val_y = train_y[dev_index], train_y[val_index]
        preds, model = runXGB1(dev_X, dev_y, val_X, val_y)
        models.append(model)

        cv_scores.append(model.best_score)
        print(cv_scores)

        cut_df = train_df.iloc[val_index]
        
        out_df = pd.DataFrame(preds)
        out_df.columns = ["level"]
        out_df["listing_id"] = cut_df.listing_id.values
        out_df['interest_tgt'] = val_y # cut_df.interest.values

        cv_preds.append(out_df)

        xgtest = xgb.DMatrix(cv_test[fold][features_to_use])
        test_preds.append(model.predict(xgtest, ntree_limit=model.best_ntree_limit))
        fold += 1  # advance so each fold uses its own test-side group features

    df_cv = pd.concat(cv_preds)
    print(np.sqrt(sklearn.metrics.mean_squared_error(df_cv.interest_tgt, df_cv.level)))
    
    apreds = np.array(test_preds)
    
    return models, df_cv, apreds

In [12]:
rv1 = run_cv1(train_df, cv_test, kf, fl)


[0]	train-rmse:0.334207	test-rmse:0.334246
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.313838	test-rmse:0.314438
[20]	train-rmse:0.29779	test-rmse:0.29909
[30]	train-rmse:0.285302	test-rmse:0.287287
[40]	train-rmse:0.27554	test-rmse:0.278241
[50]	train-rmse:0.268255	test-rmse:0.271672
[60]	train-rmse:0.262388	test-rmse:0.266489
[70]	train-rmse:0.258037	test-rmse:0.262683
[80]	train-rmse:0.2544	test-rmse:0.259523
[90]	train-rmse:0.251435	test-rmse:0.257065
[100]	train-rmse:0.248993	test-rmse:0.255148
[110]	train-rmse:0.246917	test-rmse:0.253506
[120]	train-rmse:0.245168	test-rmse:0.252138
[130]	train-rmse:0.243581	test-rmse:0.250978
[140]	train-rmse:0.242149	test-rmse:0.249958
[150]	train-rmse:0.240888	test-rmse:0.24912
[160]	train-rmse:0.239864	test-rmse:0.248472
[170]	train-rmse:0.23884	test-rmse:0.247824
[180]	train-rmse:0.237959	test-rmse:0.247244
[190]	train-rmse:0.237147	test-rmse:0.246759
[200]	train-rmse:0.236228	test-rmse:0.246335
[210]	train-rmse:0.235378	test-rmse:0.24584
[220]	train-rmse:0.234634	test-rmse:0.245451
[230]	train-rmse:0.233899	test-rmse:0.24507
[240]	train-rmse:0.233375	test-rmse:0.244773
[250]	train-rmse:0.232802	test-rmse:0.244463
[260]	train-rmse:0.232169	test-rmse:0.244183
[270]	train-rmse:0.231624	test-rmse:0.243925
[280]	train-rmse:0.231101	test-rmse:0.243663
[290]	train-rmse:0.230498	test-rmse:0.243431
[300]	train-rmse:0.230045	test-rmse:0.24323
[310]	train-rmse:0.229596	test-rmse:0.243034
[320]	train-rmse:0.22899	test-rmse:0.242834
[330]	train-rmse:0.22857	test-rmse:0.242652
[340]	train-rmse:0.228186	test-rmse:0.242506
[350]	train-rmse:0.227671	test-rmse:0.242362
[360]	train-rmse:0.227087	test-rmse:0.242227
[370]	train-rmse:0.226571	test-rmse:0.242033
[380]	train-rmse:0.226087	test-rmse:0.24189
[390]	train-rmse:0.225687	test-rmse:0.241813
[400]	train-rmse:0.225102	test-rmse:0.241633
[410]	train-rmse:0.224542	test-rmse:0.24151
[420]	train-rmse:0.223946	test-rmse:0.241387
[430]	train-rmse:0.223446	test-rmse:0.241264
[440]	train-rmse:0.222965	test-rmse:0.241135
[450]	train-rmse:0.222482	test-rmse:0.241039
[460]	train-rmse:0.222055	test-rmse:0.240929
[470]	train-rmse:0.221512	test-rmse:0.240795
[480]	train-rmse:0.221065	test-rmse:0.240694
[490]	train-rmse:0.220728	test-rmse:0.240615
[500]	train-rmse:0.220333	test-rmse:0.240523
[510]	train-rmse:0.219999	test-rmse:0.240476
[520]	train-rmse:0.219455	test-rmse:0.240416
[530]	train-rmse:0.219018	test-rmse:0.240327
[540]	train-rmse:0.218645	test-rmse:0.240249
[550]	train-rmse:0.218106	test-rmse:0.240167
[560]	train-rmse:0.217571	test-rmse:0.240131
[570]	train-rmse:0.217186	test-rmse:0.24007
[580]	train-rmse:0.216688	test-rmse:0.239979
[590]	train-rmse:0.21621	test-rmse:0.239899
[600]	train-rmse:0.215744	test-rmse:0.239857
[610]	train-rmse:0.21541	test-rmse:0.239805
[620]	train-rmse:0.214905	test-rmse:0.239755
[630]	train-rmse:0.214556	test-rmse:0.239699
[640]	train-rmse:0.214117	test-rmse:0.23964
[650]	train-rmse:0.213696	test-rmse:0.239565
[660]	train-rmse:0.213246	test-rmse:0.239531
[670]	train-rmse:0.212853	test-rmse:0.239449
[680]	train-rmse:0.212505	test-rmse:0.239414
[690]	train-rmse:0.212031	test-rmse:0.239379
[700]	train-rmse:0.211502	test-rmse:0.239345
[710]	train-rmse:0.21113	test-rmse:0.23932
[720]	train-rmse:0.21067	test-rmse:0.239253
[730]	train-rmse:0.210238	test-rmse:0.239201
[740]	train-rmse:0.209893	test-rmse:0.239154
[750]	train-rmse:0.209434	test-rmse:0.239095
[760]	train-rmse:0.208963	test-rmse:0.239039
[770]	train-rmse:0.208544	test-rmse:0.238977
[780]	train-rmse:0.208165	test-rmse:0.238905
[790]	train-rmse:0.207726	test-rmse:0.238811
[800]	train-rmse:0.207382	test-rmse:0.238765
[810]	train-rmse:0.207012	test-rmse:0.238745
[820]	train-rmse:0.206605	test-rmse:0.238713
[830]	train-rmse:0.206202	test-rmse:0.23866
[840]	train-rmse:0.205757	test-rmse:0.238657
[850]	train-rmse:0.205373	test-rmse:0.238644
[860]	train-rmse:0.204958	test-rmse:0.238607
[870]	train-rmse:0.204606	test-rmse:0.238583
[880]	train-rmse:0.204173	test-rmse:0.238536
[890]	train-rmse:0.203884	test-rmse:0.238526
[900]	train-rmse:0.203426	test-rmse:0.23848
[910]	train-rmse:0.203046	test-rmse:0.238436
[920]	train-rmse:0.202674	test-rmse:0.238413
[930]	train-rmse:0.20221	test-rmse:0.238382
[940]	train-rmse:0.20186	test-rmse:0.238375
[950]	train-rmse:0.201444	test-rmse:0.238347
[960]	train-rmse:0.20102	test-rmse:0.23833
[970]	train-rmse:0.200692	test-rmse:0.238311
[980]	train-rmse:0.20025	test-rmse:0.238301
[990]	train-rmse:0.199801	test-rmse:0.238303
[1000]	train-rmse:0.199396	test-rmse:0.238279
[1010]	train-rmse:0.199022	test-rmse:0.238274
[1020]	train-rmse:0.198643	test-rmse:0.238248
[1030]	train-rmse:0.198348	test-rmse:0.238215
[1040]	train-rmse:0.19799	test-rmse:0.238195
[1050]	train-rmse:0.197678	test-rmse:0.2382
[1060]	train-rmse:0.19728	test-rmse:0.238156
[1070]	train-rmse:0.196929	test-rmse:0.23812
[1080]	train-rmse:0.196534	test-rmse:0.23807
[1090]	train-rmse:0.19622	test-rmse:0.238083
[1100]	train-rmse:0.195799	test-rmse:0.238069
[1110]	train-rmse:0.195425	test-rmse:0.238067
[1120]	train-rmse:0.195083	test-rmse:0.238073
[1130]	train-rmse:0.194663	test-rmse:0.23804
[1140]	train-rmse:0.194367	test-rmse:0.23803
[1150]	train-rmse:0.194004	test-rmse:0.238014
[1160]	train-rmse:0.193643	test-rmse:0.238008
[1170]	train-rmse:0.193261	test-rmse:0.237982
[1180]	train-rmse:0.19286	test-rmse:0.237949
[1190]	train-rmse:0.192459	test-rmse:0.23793
[1200]	train-rmse:0.192057	test-rmse:0.23791
[1210]	train-rmse:0.191714	test-rmse:0.237894
[1220]	train-rmse:0.191347	test-rmse:0.237895
[1230]	train-rmse:0.190948	test-rmse:0.237879
[1240]	train-rmse:0.190604	test-rmse:0.237843
[1250]	train-rmse:0.190214	test-rmse:0.237815
[1260]	train-rmse:0.189863	test-rmse:0.237804
[1270]	train-rmse:0.189436	test-rmse:0.237775
[1280]	train-rmse:0.189103	test-rmse:0.23776
[1290]	train-rmse:0.188756	test-rmse:0.237747
[1300]	train-rmse:0.188435	test-rmse:0.237736
[1310]	train-rmse:0.188003	test-rmse:0.237714
[1320]	train-rmse:0.187613	test-rmse:0.237663
[1330]	train-rmse:0.187307	test-rmse:0.237645
[1340]	train-rmse:0.186947	test-rmse:0.237644
[1350]	train-rmse:0.186576	test-rmse:0.237657
[1360]	train-rmse:0.186228	test-rmse:0.237643
[1370]	train-rmse:0.185895	test-rmse:0.237682
[1380]	train-rmse:0.185459	test-rmse:0.237656
[1390]	train-rmse:0.185138	test-rmse:0.237601
[1400]	train-rmse:0.184826	test-rmse:0.237591
[1410]	train-rmse:0.18449	test-rmse:0.237602
[1420]	train-rmse:0.184151	test-rmse:0.237583
[1430]	train-rmse:0.183794	test-rmse:0.237552
[1440]	train-rmse:0.183433	test-rmse:0.237568
[1450]	train-rmse:0.183089	test-rmse:0.23753
[1460]	train-rmse:0.18277	test-rmse:0.237504
[1470]	train-rmse:0.182445	test-rmse:0.237508
[1480]	train-rmse:0.182045	test-rmse:0.237526
[1490]	train-rmse:0.181734	test-rmse:0.237504
[1500]	train-rmse:0.181468	test-rmse:0.2375
[1510]	train-rmse:0.181231	test-rmse:0.237491
[1520]	train-rmse:0.180947	test-rmse:0.237486
[1530]	train-rmse:0.180595	test-rmse:0.237489
[1540]	train-rmse:0.180192	test-rmse:0.237507
[1550]	train-rmse:0.179865	test-rmse:0.237501
[1560]	train-rmse:0.179501	test-rmse:0.237523
[1570]	train-rmse:0.179137	test-rmse:0.237528
Stopping. Best iteration:
[1520]	train-rmse:0.180947	test-rmse:0.237486

[0.237486]
[0]	train-rmse:0.334188	test-rmse:0.334237
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[... rounds 10-1120 truncated: test-rmse falls steadily from 0.3143 to ~0.2377, then flattens ...]
Stopping. Best iteration:
[1075]	train-rmse:0.196628	test-rmse:0.237626

[0.237486, 0.237626]
[0]	train-rmse:0.334147	test-rmse:0.334257
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[... rounds 10-1390 truncated: test-rmse falls steadily from 0.3150 to ~0.2407, then flattens ...]
Stopping. Best iteration:
[1346]	train-rmse:0.186137	test-rmse:0.240721

[0.237486, 0.237626, 0.240721]
[0]	train-rmse:0.334172	test-rmse:0.3343
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[... rounds 10-1340 truncated: test-rmse falls steadily from 0.3148 to ~0.2392, then flattens ...]
Stopping. Best iteration:
[1297]	train-rmse:0.18846	test-rmse:0.23917

[0.237486, 0.237626, 0.240721, 0.23917]
[0]	train-rmse:0.334199	test-rmse:0.334187
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[... rounds 10-1380 truncated: test-rmse falls steadily from 0.3143 to ~0.2383, then flattens ...]
Stopping. Best iteration:
[1339]	train-rmse:0.187228	test-rmse:0.238312

[0.237486, 0.237626, 0.240721, 0.23917, 0.238312]
0.238666
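
(the bracketed list holds each fold's best test-rmse, and the final 0.238666 is the aggregate CV score. note it is close to, but not exactly, the simple mean of the fold scores -- consistent with the aggregate being computed over the pooled out-of-fold predictions rather than by averaging per-fold rmse:)

import numpy as np

fold_scores = [0.237486, 0.237626, 0.240721, 0.23917, 0.238312]
print(np.mean(fold_scores))  # 0.238663 vs. the 0.238666 printed above
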

In [13]:
dfs1 = run_to_stackdf(rv1)
pickle.dump(dfs1, open('modeloutput-xgb-reg-r3.pkl', 'wb'))
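
(these stacked prediction frames are presumably consumed by a later ensembling notebook; a minimal sketch of the reload step, assuming -- hypothetically, since run_to_stackdf is defined earlier -- that dfs1 holds prediction frames indexed by listing_id; the merge in the comment is illustrative, not the notebook's actual next step:)

import pickle

# reload this model's out-of-fold / test predictions for second-level stacking
dfs1 = pickle.load(open('modeloutput-xgb-reg-r3.pkl', 'rb'))

# joining back onto the main frames would then follow the usual pattern, e.g.:
# train_df = pd.merge(train_df, dfs1[0], left_on='listing_id', right_index=True)
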

In [ ]: