In [1]:
import os
import sys
import operator
import numpy as np
import pandas as pd
from scipy import sparse
import xgboost as xgb
import random
from sklearn import model_selection, preprocessing, ensemble
from sklearn.metrics import log_loss
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer

import pickle

import sklearn.cluster

import Levenshtein

from multiprocessing import Pool

In [2]:
train_df = pd.read_pickle('fin-dprep-train.pkl')
test_df = pd.read_pickle('fin-dprep-test.pkl')

features_to_use = pickle.load(open('fin-dprep-flist.pkl', 'rb'))

medium_price = pd.read_pickle('fin-medium-price.pkl')

train_df = pd.merge(train_df, medium_price, left_on='listing_id', right_index=True)
test_df = pd.merge(test_df, medium_price, left_on='listing_id', right_index=True)

In [3]:
train_df["predicted_price_diff"] = np.log(train_df["price"]) - np.log(train_df["predicted_price"])
test_df["predicted_price_diff"] = np.log(test_df["price"]) - np.log(test_df["predicted_price"])

In [4]:
class MeansProcessor:
    """Per-group target statistics (count / mean / std), fit on one fold and
    applied out-of-fold -- a leave-one-fold-out target encoding."""

    def __init__(self, key, outkey=None, tgt='interest_cat'):
        self.key = key
        self.outkey = key if outkey is None else outkey
        
        self.count = {}
        self.means = {}
        self.std = {}
        self.global_means = 0
        
        self.tgt = tgt
        
        self.outkeys = [self.outkey + '_level', self.outkey + '_level_std']
        
    def fit(self, df):
        self.global_means = df[self.tgt].mean()
            
        for name, group in df.groupby(self.key, sort=False):
            
            self.count[name] = len(group)

            if len(group) < 1:  # guard only; groupby never yields empty groups
                self.means[name] = np.nan
                self.std[name] = np.nan
            else:
                self.means[name] = np.mean(group[self.tgt])
                self.std[name] = np.std(group[self.tgt])
            
    def predict(self, df):
        # default to NaN (mean/std) and 0 (count) for groups unseen during fit
        for l in self.outkeys:
            df[l] = np.nan
            
        df[self.outkey + '_count'] = 0
            
        for name, group in df.groupby(self.key, sort=False):
            if name == 0:
                continue
            
            if name in self.means:
                df.loc[group.index, self.outkey + '_count'] = self.count[name]
                df.loc[group.index, self.outkey + '_level'] = self.means[name]
                df.loc[group.index, self.outkey + '_level_std'] = self.std[name]
        
        return df
    
    def get_features(self):
        return self.outkeys.copy() + [self.outkey + '_count']
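
# Quick illustration (mine, not part of the original run): on a toy frame,
# fit() learns per-group stats of the target and predict() writes them back
# as <outkey>_level / <outkey>_level_std / <outkey>_count columns.
toy = pd.DataFrame({'manager_id': ['a', 'a', 'b', 'b', 'b', 'c'],
                    'interest_cat': [0, 1, 2, 2, 1, 0]})
enc = MeansProcessor('manager_id', 'mgr')
enc.fit(toy)
enc.predict(toy.copy())[['mgr_level', 'mgr_level_std', 'mgr_count']]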

# I kept the same index randomization (with a fixed seed) so I could validate
# this code against the original.

target_num_map = {'low':0, 'medium':1, 'high':2}
train_y = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

def proc_fold(fold):
    train_index = fold[0]
    test_index = fold[1]
    
    cv_train = train_df.iloc[train_index]
    cv_valid = train_df.iloc[test_index][['interest_level', 'manager_id', 'building_id']]
    cv_test = test_df.copy()
    
    m_build = MeansProcessor('building_id', 'building_sort')
    m_build.fit(cv_train)
    cv_valid = m_build.predict(cv_valid)
    cv_test = m_build.predict(cv_test)

    m_mgr = MeansProcessor('manager_id', 'manager_sort')
    m_mgr.fit(cv_train)
    cv_valid = m_mgr.predict(cv_valid)
    cv_test = m_mgr.predict(cv_test)

    m_comb = MeansProcessor(['building_id', 'manager_id'], 'mb_comb')
    m_comb.fit(cv_train)
    cv_valid = m_comb.predict(cv_valid)
    cv_test = m_comb.predict(cv_test)

    return cv_train, cv_valid, cv_test

kf = model_selection.StratifiedKFold(n_splits=5, shuffle=True, random_state=2016)
folds = [(k[0], k[1]) for k in kf.split(list(range(train_df.shape[0])), train_y)]
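
# Sanity check (mine, not in the original notebook): random_state is fixed, so
# kf.split is deterministic and these folds match the run that produced the
# cached pickle loaded below.
folds2 = [(k[0], k[1]) for k in kf.split(list(range(train_df.shape[0])), train_y)]
assert all(np.array_equal(a[0], b[0]) and np.array_equal(a[1], b[1])
           for a, b in zip(folds, folds2))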

# compute the per-fold group features in parallel, caching the result so that
# re-running the notebook skips this expensive step
try:
    rv = pickle.load(open('0420-model-groupfeatures.pkl', 'rb'))
except FileNotFoundError:
    with Pool(5) as pool:
        rv = pool.map(proc_fold, folds)

    pickle.dump(rv, open('0420-model-groupfeatures.pkl', 'wb'))

# dummy processors, used only for get_features() below
m_build = MeansProcessor('building_id', 'building_sort')
m_mgr = MeansProcessor('manager_id', 'manager_sort')
m_comb = MeansProcessor(['building_id', 'manager_id'], 'mb_comb')

group_features = m_build.get_features() + m_mgr.get_features() + m_comb.get_features()

cv_test = []
for r in rv:
    cv_test.append(test_df.merge(r[2][group_features], left_index=True, right_index=True))

cv_allvalid = pd.concat([r[1] for r in rv])

train_df = train_df.merge(cv_allvalid[group_features], left_index=True, right_index=True)
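
# Sanity check (mine): each element of rv is one fold's (cv_train, cv_valid,
# cv_test) triple, so the concatenated cv_allvalid should cover every training
# row exactly once with out-of-fold encodings.
assert len(rv) == len(folds)
assert sum(len(r[1]) for r in rv) == train_df.shape[0]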

In [5]:
train_ids = []
val_ids = []

for dev_index, val_index in kf.split(range(train_df.shape[0]), train_df.interest_cat):
    train_ids.append(train_df.iloc[dev_index].listing_id.values)
    val_ids.append(train_df.iloc[val_index].listing_id.values)
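
# Recording the listing_ids per fold -- presumably so later stacking steps can
# reconstruct exactly which rows were out-of-fold for each model.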

In [6]:
#fl = features_to_use + m_build.get_features() + m_mgr.get_features() + m_comb.get_features() + tfidf_fn

fl = features_to_use.copy() + group_features 

#fl.remove('price')
#fl.remove('price_t')
#fl.remove('price_per_room')
fl.append('predicted_price')
fl.append('predicted_price_diff')

fl.append('manager_lazy_rate')

fl.append('density_exp01')

In [10]:
def run3_to_stackdf(run):
    # run = (models, df_cv, apreds); apreds has shape (n_folds, n_test, 3).
    # Average the per-fold test predictions...
    df_testpreds3 = pd.DataFrame(run[2].mean(axis=0))
    df_testpreds3.columns = ['low', 'medium', 'high']
    df_testpreds3['listing_id'] = test_df.listing_id.values

    # ...and stack them under the out-of-fold train predictions, keyed by listing_id
    df_allpreds3 = pd.concat([run[1][['low', 'medium', 'high', 'listing_id']], df_testpreds3])

    df_allpreds3.sort_values('listing_id', inplace=True)
    df_allpreds3.set_index('listing_id', inplace=True)
    
    # keep each fold's raw test predictions as well, indexed the same way
    df_fold = []
    for f in range(run[2].shape[0]):
        df_fold.append(pd.DataFrame(run[2][f]))
        df_fold[-1]['listing_id'] = test_df.listing_id.values
        df_fold[-1].sort_values('listing_id', inplace=True)
        df_fold[-1].set_index('listing_id', inplace=True)

    return (df_allpreds3, df_fold)

In [11]:
def runXGB(train_X, train_y, test_X, test_y=None, feature_names=None, seed_val=0, num_rounds=4000):
    param = {}
    param['objective'] = 'multi:softprob'
    #param['tree_method'] = 'hist'
    param['eta'] = 0.02
    param['max_depth'] = 6
    param['silent'] = 1
    param['num_class'] = 3
    param['eval_metric'] = "mlogloss"
    param['min_child_weight'] = 1
    param['subsample'] = 0.7
    param['colsample_bytree'] = 0.7
    param['seed'] = seed_val
    #param['base_score'] = [np.mean(train_y == i) for i in [0, 1, 2]]

    plst = list(param.items())
    xgtrain = xgb.DMatrix(train_X, label=train_y)

    if test_y is not None:
        xgtest = xgb.DMatrix(test_X, label=test_y)
        watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
        model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=50, verbose_eval=10)
    else:
        xgtest = xgb.DMatrix(test_X)
        model = xgb.train(plst, xgtrain, num_rounds)

    # with early stopping, best_ntree_limit truncates prediction at the best round
    pred_test_y = model.predict(xgtest, ntree_limit=model.best_ntree_limit)
    return pred_test_y, model

In [12]:
def run_cv(train_df, cv_test, kf, features_to_use):
    train_X = train_df[features_to_use]
    train_y = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

    cv_preds = []
    cv_scores = []
    models = []
    test_preds = []
    
    fold = 0

    for dev_index, val_index in kf.split(range(train_X.shape[0]), train_y):

        dev_X, val_X = train_X.iloc[dev_index], train_X.iloc[val_index]
        dev_y, val_y = train_y[dev_index], train_y[val_index]
        preds, model = runXGB(dev_X, dev_y, val_X, val_y)
        models.append(model)

        cv_scores.append(log_loss(val_y, preds))
        print(cv_scores)

        cut_df = train_df.iloc[val_index]
        out_df = pd.DataFrame(preds)
        out_df.columns = ["low", "medium", "high"]
        out_df["listing_id"] = cut_df.listing_id.values
        interest = cut_df.interest_level.apply(lambda x: target_num_map[x])
        out_df['interest_tgt'] = interest.values

        cv_preds.append(out_df)

        # each fold's cv_test copy carries that fold's group encodings
        xgtest = xgb.DMatrix(cv_test[fold][features_to_use])
        test_preds.append(model.predict(xgtest, ntree_limit=model.best_ntree_limit))

        fold += 1

    df_cv = pd.concat(cv_preds)
    print(log_loss(df_cv.interest_tgt, df_cv[['low', 'medium', 'high']]))

    apreds = np.array(test_preds)
    
    return models, df_cv, apreds

In [13]:
rv3 = run_cv(train_df, cv_test, kf, fl)


[0]	train-mlogloss:1.08457	test-mlogloss:1.08474
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.96348	test-mlogloss:0.965683
[20]	train-mlogloss:0.872029	test-mlogloss:0.876469
[30]	train-mlogloss:0.801367	test-mlogloss:0.807939
[40]	train-mlogloss:0.745739	test-mlogloss:0.754607
[50]	train-mlogloss:0.701679	test-mlogloss:0.712797
[60]	train-mlogloss:0.666485	test-mlogloss:0.679807
[70]	train-mlogloss:0.637654	test-mlogloss:0.653089
[80]	train-mlogloss:0.614372	test-mlogloss:0.631793
[90]	train-mlogloss:0.595055	test-mlogloss:0.614436
[100]	train-mlogloss:0.579094	test-mlogloss:0.60046
[110]	train-mlogloss:0.565481	test-mlogloss:0.588773
[120]	train-mlogloss:0.553736	test-mlogloss:0.5789
[130]	train-mlogloss:0.543894	test-mlogloss:0.570894
[140]	train-mlogloss:0.535246	test-mlogloss:0.564114
[150]	train-mlogloss:0.527701	test-mlogloss:0.558442
[160]	train-mlogloss:0.521015	test-mlogloss:0.553475
[170]	train-mlogloss:0.515087	test-mlogloss:0.549249
[180]	train-mlogloss:0.509658	test-mlogloss:0.545518
[190]	train-mlogloss:0.504808	test-mlogloss:0.542311
[200]	train-mlogloss:0.500315	test-mlogloss:0.53951
[210]	train-mlogloss:0.496037	test-mlogloss:0.537072
[220]	train-mlogloss:0.492217	test-mlogloss:0.534919
[230]	train-mlogloss:0.488477	test-mlogloss:0.532879
[240]	train-mlogloss:0.485089	test-mlogloss:0.531083
[250]	train-mlogloss:0.481918	test-mlogloss:0.529455
[260]	train-mlogloss:0.479012	test-mlogloss:0.528067
[270]	train-mlogloss:0.476019	test-mlogloss:0.52667
[280]	train-mlogloss:0.473255	test-mlogloss:0.525446
[290]	train-mlogloss:0.470753	test-mlogloss:0.52427
[300]	train-mlogloss:0.468024	test-mlogloss:0.523174
[310]	train-mlogloss:0.465594	test-mlogloss:0.522203
[320]	train-mlogloss:0.463247	test-mlogloss:0.521422
[330]	train-mlogloss:0.461059	test-mlogloss:0.520575
[340]	train-mlogloss:0.458819	test-mlogloss:0.519766
[350]	train-mlogloss:0.456705	test-mlogloss:0.519057
[360]	train-mlogloss:0.454647	test-mlogloss:0.518484
[370]	train-mlogloss:0.452473	test-mlogloss:0.517882
[380]	train-mlogloss:0.450452	test-mlogloss:0.517276
[390]	train-mlogloss:0.448549	test-mlogloss:0.516619
[400]	train-mlogloss:0.446706	test-mlogloss:0.516004
[410]	train-mlogloss:0.444759	test-mlogloss:0.515526
[420]	train-mlogloss:0.442839	test-mlogloss:0.5151
[430]	train-mlogloss:0.441014	test-mlogloss:0.514779
[440]	train-mlogloss:0.439166	test-mlogloss:0.514378
[450]	train-mlogloss:0.437239	test-mlogloss:0.513916
[460]	train-mlogloss:0.43534	test-mlogloss:0.513468
[470]	train-mlogloss:0.433629	test-mlogloss:0.513086
[480]	train-mlogloss:0.431744	test-mlogloss:0.512774
[490]	train-mlogloss:0.430184	test-mlogloss:0.512465
[500]	train-mlogloss:0.428682	test-mlogloss:0.512167
[510]	train-mlogloss:0.427056	test-mlogloss:0.511812
[520]	train-mlogloss:0.42544	test-mlogloss:0.511541
[530]	train-mlogloss:0.423883	test-mlogloss:0.511311
[540]	train-mlogloss:0.422333	test-mlogloss:0.511023
[550]	train-mlogloss:0.420689	test-mlogloss:0.5108
[560]	train-mlogloss:0.419159	test-mlogloss:0.510596
[570]	train-mlogloss:0.41781	test-mlogloss:0.510369
[580]	train-mlogloss:0.416276	test-mlogloss:0.51014
[590]	train-mlogloss:0.414686	test-mlogloss:0.510003
[600]	train-mlogloss:0.413281	test-mlogloss:0.509872
[610]	train-mlogloss:0.411911	test-mlogloss:0.509679
[620]	train-mlogloss:0.410288	test-mlogloss:0.509531
[630]	train-mlogloss:0.408764	test-mlogloss:0.509315
[640]	train-mlogloss:0.407232	test-mlogloss:0.509203
[650]	train-mlogloss:0.405755	test-mlogloss:0.509051
[660]	train-mlogloss:0.404273	test-mlogloss:0.508909
[670]	train-mlogloss:0.402781	test-mlogloss:0.508724
[680]	train-mlogloss:0.401333	test-mlogloss:0.50858
[690]	train-mlogloss:0.399852	test-mlogloss:0.508485
[700]	train-mlogloss:0.398293	test-mlogloss:0.508391
[710]	train-mlogloss:0.396752	test-mlogloss:0.508235
[720]	train-mlogloss:0.395181	test-mlogloss:0.508152
[730]	train-mlogloss:0.39363	test-mlogloss:0.507957
[740]	train-mlogloss:0.392286	test-mlogloss:0.507847
[750]	train-mlogloss:0.39087	test-mlogloss:0.507639
[760]	train-mlogloss:0.389552	test-mlogloss:0.507487
[770]	train-mlogloss:0.388145	test-mlogloss:0.507513
[780]	train-mlogloss:0.386686	test-mlogloss:0.507354
[790]	train-mlogloss:0.385257	test-mlogloss:0.507271
[800]	train-mlogloss:0.383852	test-mlogloss:0.507223
[810]	train-mlogloss:0.382626	test-mlogloss:0.507124
[820]	train-mlogloss:0.381251	test-mlogloss:0.507035
[830]	train-mlogloss:0.379903	test-mlogloss:0.50701
[840]	train-mlogloss:0.378773	test-mlogloss:0.506868
[850]	train-mlogloss:0.377311	test-mlogloss:0.50674
[860]	train-mlogloss:0.375907	test-mlogloss:0.506648
[870]	train-mlogloss:0.374456	test-mlogloss:0.506565
[880]	train-mlogloss:0.373204	test-mlogloss:0.506465
[890]	train-mlogloss:0.371972	test-mlogloss:0.506396
[900]	train-mlogloss:0.370537	test-mlogloss:0.506338
[910]	train-mlogloss:0.369345	test-mlogloss:0.506243
[920]	train-mlogloss:0.368102	test-mlogloss:0.506188
[930]	train-mlogloss:0.366794	test-mlogloss:0.506226
[940]	train-mlogloss:0.365392	test-mlogloss:0.506145
[950]	train-mlogloss:0.363955	test-mlogloss:0.506165
[960]	train-mlogloss:0.362698	test-mlogloss:0.506048
[970]	train-mlogloss:0.361551	test-mlogloss:0.506006
[980]	train-mlogloss:0.360458	test-mlogloss:0.505943
[990]	train-mlogloss:0.359043	test-mlogloss:0.50604
[1000]	train-mlogloss:0.357772	test-mlogloss:0.506029
[1010]	train-mlogloss:0.356413	test-mlogloss:0.506072
[1020]	train-mlogloss:0.355174	test-mlogloss:0.506186
Stopping. Best iteration:
[978]	train-mlogloss:0.360616	test-mlogloss:0.505937

[0.50593702950354136]
[0]	train-mlogloss:1.08465	test-mlogloss:1.08469
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.963941	test-mlogloss:0.965377
[20]	train-mlogloss:0.872884	test-mlogloss:0.875824
[30]	train-mlogloss:0.802516	test-mlogloss:0.807074
[40]	train-mlogloss:0.747009	test-mlogloss:0.753138
[50]	train-mlogloss:0.703105	test-mlogloss:0.710825
[60]	train-mlogloss:0.668051	test-mlogloss:0.677339
[70]	train-mlogloss:0.639167	test-mlogloss:0.650131
[80]	train-mlogloss:0.615889	test-mlogloss:0.628493
[90]	train-mlogloss:0.596526	test-mlogloss:0.610784
[100]	train-mlogloss:0.580618	test-mlogloss:0.596611
[110]	train-mlogloss:0.567006	test-mlogloss:0.584608
[120]	train-mlogloss:0.555304	test-mlogloss:0.574531
[130]	train-mlogloss:0.545487	test-mlogloss:0.566372
[140]	train-mlogloss:0.536853	test-mlogloss:0.559444
[150]	train-mlogloss:0.529277	test-mlogloss:0.553654
[160]	train-mlogloss:0.522551	test-mlogloss:0.548557
[170]	train-mlogloss:0.516529	test-mlogloss:0.544076
[180]	train-mlogloss:0.511107	test-mlogloss:0.540329
[190]	train-mlogloss:0.506237	test-mlogloss:0.537125
[200]	train-mlogloss:0.501638	test-mlogloss:0.534176
[210]	train-mlogloss:0.497498	test-mlogloss:0.531654
[220]	train-mlogloss:0.49361	test-mlogloss:0.529387
[230]	train-mlogloss:0.490053	test-mlogloss:0.527486
[240]	train-mlogloss:0.48668	test-mlogloss:0.525641
[250]	train-mlogloss:0.483373	test-mlogloss:0.524038
[260]	train-mlogloss:0.480316	test-mlogloss:0.522616
[270]	train-mlogloss:0.477294	test-mlogloss:0.521411
[280]	train-mlogloss:0.474683	test-mlogloss:0.520333
[290]	train-mlogloss:0.472036	test-mlogloss:0.519288
[300]	train-mlogloss:0.469485	test-mlogloss:0.518285
[310]	train-mlogloss:0.467279	test-mlogloss:0.517407
[320]	train-mlogloss:0.464915	test-mlogloss:0.516607
[330]	train-mlogloss:0.462639	test-mlogloss:0.515831
[340]	train-mlogloss:0.460544	test-mlogloss:0.51514
[350]	train-mlogloss:0.458509	test-mlogloss:0.514536
[360]	train-mlogloss:0.456545	test-mlogloss:0.513944
[370]	train-mlogloss:0.454397	test-mlogloss:0.513326
[380]	train-mlogloss:0.452365	test-mlogloss:0.512784
[390]	train-mlogloss:0.450522	test-mlogloss:0.512343
[400]	train-mlogloss:0.448609	test-mlogloss:0.511897
[410]	train-mlogloss:0.446703	test-mlogloss:0.511406
[420]	train-mlogloss:0.444894	test-mlogloss:0.510944
[430]	train-mlogloss:0.443086	test-mlogloss:0.510522
[440]	train-mlogloss:0.441377	test-mlogloss:0.51015
[450]	train-mlogloss:0.439603	test-mlogloss:0.509825
[460]	train-mlogloss:0.4379	test-mlogloss:0.509541
[470]	train-mlogloss:0.436303	test-mlogloss:0.509197
[480]	train-mlogloss:0.434644	test-mlogloss:0.508899
[490]	train-mlogloss:0.433004	test-mlogloss:0.508607
[500]	train-mlogloss:0.431324	test-mlogloss:0.508303
[510]	train-mlogloss:0.429729	test-mlogloss:0.508034
[520]	train-mlogloss:0.428004	test-mlogloss:0.50771
[530]	train-mlogloss:0.426386	test-mlogloss:0.507444
[540]	train-mlogloss:0.4249	test-mlogloss:0.507201
[550]	train-mlogloss:0.42345	test-mlogloss:0.506983
[560]	train-mlogloss:0.421807	test-mlogloss:0.506693
[570]	train-mlogloss:0.42017	test-mlogloss:0.506469
[580]	train-mlogloss:0.418476	test-mlogloss:0.506256
[590]	train-mlogloss:0.416969	test-mlogloss:0.506052
[600]	train-mlogloss:0.415227	test-mlogloss:0.505825
[610]	train-mlogloss:0.413783	test-mlogloss:0.505637
[620]	train-mlogloss:0.412143	test-mlogloss:0.505484
[630]	train-mlogloss:0.410738	test-mlogloss:0.505305
[640]	train-mlogloss:0.409167	test-mlogloss:0.505089
[650]	train-mlogloss:0.407679	test-mlogloss:0.504951
[660]	train-mlogloss:0.40628	test-mlogloss:0.504828
[670]	train-mlogloss:0.404878	test-mlogloss:0.504752
[680]	train-mlogloss:0.403509	test-mlogloss:0.504632
[690]	train-mlogloss:0.402086	test-mlogloss:0.504476
[700]	train-mlogloss:0.400403	test-mlogloss:0.504288
[710]	train-mlogloss:0.399067	test-mlogloss:0.504159
[720]	train-mlogloss:0.397732	test-mlogloss:0.504061
[730]	train-mlogloss:0.396314	test-mlogloss:0.503895
[740]	train-mlogloss:0.394916	test-mlogloss:0.503794
[750]	train-mlogloss:0.393338	test-mlogloss:0.503783
[760]	train-mlogloss:0.391913	test-mlogloss:0.50361
[770]	train-mlogloss:0.39051	test-mlogloss:0.503497
[780]	train-mlogloss:0.389071	test-mlogloss:0.503325
[790]	train-mlogloss:0.387626	test-mlogloss:0.503229
[800]	train-mlogloss:0.386148	test-mlogloss:0.50313
[810]	train-mlogloss:0.384836	test-mlogloss:0.503032
[820]	train-mlogloss:0.383432	test-mlogloss:0.502824
[830]	train-mlogloss:0.382271	test-mlogloss:0.502731
[840]	train-mlogloss:0.380872	test-mlogloss:0.502597
[850]	train-mlogloss:0.379553	test-mlogloss:0.502508
[860]	train-mlogloss:0.378371	test-mlogloss:0.502451
[870]	train-mlogloss:0.377141	test-mlogloss:0.502422
[880]	train-mlogloss:0.375911	test-mlogloss:0.502369
[890]	train-mlogloss:0.374638	test-mlogloss:0.502291
[900]	train-mlogloss:0.373167	test-mlogloss:0.502257
[910]	train-mlogloss:0.371807	test-mlogloss:0.502202
[920]	train-mlogloss:0.37037	test-mlogloss:0.502091
[930]	train-mlogloss:0.368988	test-mlogloss:0.502085
[940]	train-mlogloss:0.367668	test-mlogloss:0.50207
[950]	train-mlogloss:0.366394	test-mlogloss:0.502037
[960]	train-mlogloss:0.365111	test-mlogloss:0.50196
[970]	train-mlogloss:0.363908	test-mlogloss:0.501993
[980]	train-mlogloss:0.362627	test-mlogloss:0.501904
[990]	train-mlogloss:0.361387	test-mlogloss:0.501837
[1000]	train-mlogloss:0.360052	test-mlogloss:0.501789
[1010]	train-mlogloss:0.358685	test-mlogloss:0.501799
[1020]	train-mlogloss:0.357445	test-mlogloss:0.50179
[1030]	train-mlogloss:0.356138	test-mlogloss:0.501749
[1040]	train-mlogloss:0.354901	test-mlogloss:0.501769
[1050]	train-mlogloss:0.353704	test-mlogloss:0.501707
[1060]	train-mlogloss:0.352372	test-mlogloss:0.501639
[1070]	train-mlogloss:0.351089	test-mlogloss:0.50154
[1080]	train-mlogloss:0.34995	test-mlogloss:0.501438
[1090]	train-mlogloss:0.348707	test-mlogloss:0.501354
[1100]	train-mlogloss:0.3475	test-mlogloss:0.501286
[1110]	train-mlogloss:0.346212	test-mlogloss:0.50122
[1120]	train-mlogloss:0.344985	test-mlogloss:0.501237
[1130]	train-mlogloss:0.34382	test-mlogloss:0.501222
[1140]	train-mlogloss:0.342606	test-mlogloss:0.501121
[1150]	train-mlogloss:0.341407	test-mlogloss:0.501047
[1160]	train-mlogloss:0.340203	test-mlogloss:0.500928
[1170]	train-mlogloss:0.339119	test-mlogloss:0.500996
[1180]	train-mlogloss:0.337938	test-mlogloss:0.500952
[1190]	train-mlogloss:0.336759	test-mlogloss:0.500963
[1200]	train-mlogloss:0.335569	test-mlogloss:0.501018
[1210]	train-mlogloss:0.33444	test-mlogloss:0.500997
Stopping. Best iteration:
[1161]	train-mlogloss:0.340089	test-mlogloss:0.500923

[0.50593702950354136, 0.50092266559477416]
[0]	train-mlogloss:1.08448	test-mlogloss:1.08495
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.962992	test-mlogloss:0.967513
[20]	train-mlogloss:0.871387	test-mlogloss:0.879663
[30]	train-mlogloss:0.800331	test-mlogloss:0.811987
[40]	train-mlogloss:0.744479	test-mlogloss:0.759205
[50]	train-mlogloss:0.700321	test-mlogloss:0.717827
[60]	train-mlogloss:0.664943	test-mlogloss:0.685229
[70]	train-mlogloss:0.63587	test-mlogloss:0.658718
[80]	train-mlogloss:0.612489	test-mlogloss:0.63765
[90]	train-mlogloss:0.593076	test-mlogloss:0.620559
[100]	train-mlogloss:0.577102	test-mlogloss:0.606687
[110]	train-mlogloss:0.563492	test-mlogloss:0.595101
[120]	train-mlogloss:0.551667	test-mlogloss:0.585425
[130]	train-mlogloss:0.541728	test-mlogloss:0.577582
[140]	train-mlogloss:0.533017	test-mlogloss:0.570908
[150]	train-mlogloss:0.525484	test-mlogloss:0.565362
[160]	train-mlogloss:0.518796	test-mlogloss:0.560553
[170]	train-mlogloss:0.512791	test-mlogloss:0.556424
[180]	train-mlogloss:0.507402	test-mlogloss:0.552787
[190]	train-mlogloss:0.502443	test-mlogloss:0.549634
[200]	train-mlogloss:0.497977	test-mlogloss:0.546937
[210]	train-mlogloss:0.493831	test-mlogloss:0.544581
[220]	train-mlogloss:0.489909	test-mlogloss:0.542378
[230]	train-mlogloss:0.486345	test-mlogloss:0.54051
[240]	train-mlogloss:0.482975	test-mlogloss:0.538836
[250]	train-mlogloss:0.479899	test-mlogloss:0.53742
[260]	train-mlogloss:0.476858	test-mlogloss:0.536085
[270]	train-mlogloss:0.473934	test-mlogloss:0.534844
[280]	train-mlogloss:0.471232	test-mlogloss:0.533697
[290]	train-mlogloss:0.468451	test-mlogloss:0.532529
[300]	train-mlogloss:0.466032	test-mlogloss:0.531734
[310]	train-mlogloss:0.46357	test-mlogloss:0.530874
[320]	train-mlogloss:0.461214	test-mlogloss:0.530003
[330]	train-mlogloss:0.458952	test-mlogloss:0.529224
[340]	train-mlogloss:0.456858	test-mlogloss:0.528584
[350]	train-mlogloss:0.454687	test-mlogloss:0.527964
[360]	train-mlogloss:0.452574	test-mlogloss:0.527348
[370]	train-mlogloss:0.450331	test-mlogloss:0.526721
[380]	train-mlogloss:0.448268	test-mlogloss:0.5262
[390]	train-mlogloss:0.446238	test-mlogloss:0.52568
[400]	train-mlogloss:0.444063	test-mlogloss:0.525134
[410]	train-mlogloss:0.442434	test-mlogloss:0.524733
[420]	train-mlogloss:0.440619	test-mlogloss:0.524316
[430]	train-mlogloss:0.438639	test-mlogloss:0.523913
[440]	train-mlogloss:0.43682	test-mlogloss:0.52349
[450]	train-mlogloss:0.434939	test-mlogloss:0.523073
[460]	train-mlogloss:0.433449	test-mlogloss:0.522746
[470]	train-mlogloss:0.431705	test-mlogloss:0.522426
[480]	train-mlogloss:0.430039	test-mlogloss:0.522045
[490]	train-mlogloss:0.428146	test-mlogloss:0.521781
[500]	train-mlogloss:0.42652	test-mlogloss:0.521472
[510]	train-mlogloss:0.424719	test-mlogloss:0.521102
[520]	train-mlogloss:0.422946	test-mlogloss:0.52082
[530]	train-mlogloss:0.421542	test-mlogloss:0.520601
[540]	train-mlogloss:0.419967	test-mlogloss:0.520308
[550]	train-mlogloss:0.418461	test-mlogloss:0.52006
[560]	train-mlogloss:0.416766	test-mlogloss:0.519882
[570]	train-mlogloss:0.415187	test-mlogloss:0.519651
[580]	train-mlogloss:0.413686	test-mlogloss:0.519503
[590]	train-mlogloss:0.412156	test-mlogloss:0.519169
[600]	train-mlogloss:0.410559	test-mlogloss:0.518938
[610]	train-mlogloss:0.409067	test-mlogloss:0.518637
[620]	train-mlogloss:0.407593	test-mlogloss:0.518468
[630]	train-mlogloss:0.406231	test-mlogloss:0.518315
[640]	train-mlogloss:0.404836	test-mlogloss:0.518138
[650]	train-mlogloss:0.403466	test-mlogloss:0.517991
[660]	train-mlogloss:0.401837	test-mlogloss:0.517897
[670]	train-mlogloss:0.400135	test-mlogloss:0.517863
[680]	train-mlogloss:0.398719	test-mlogloss:0.517655
[690]	train-mlogloss:0.397267	test-mlogloss:0.517461
[700]	train-mlogloss:0.395902	test-mlogloss:0.517382
[710]	train-mlogloss:0.39439	test-mlogloss:0.517265
[720]	train-mlogloss:0.392892	test-mlogloss:0.517121
[730]	train-mlogloss:0.39139	test-mlogloss:0.516973
[740]	train-mlogloss:0.389994	test-mlogloss:0.516863
[750]	train-mlogloss:0.388442	test-mlogloss:0.516847
[760]	train-mlogloss:0.387271	test-mlogloss:0.516768
[770]	train-mlogloss:0.385891	test-mlogloss:0.516702
[780]	train-mlogloss:0.384594	test-mlogloss:0.516582
[790]	train-mlogloss:0.383122	test-mlogloss:0.516435
[800]	train-mlogloss:0.381774	test-mlogloss:0.516267
[810]	train-mlogloss:0.380436	test-mlogloss:0.516221
[820]	train-mlogloss:0.379153	test-mlogloss:0.516108
[830]	train-mlogloss:0.3778	test-mlogloss:0.516002
[840]	train-mlogloss:0.376467	test-mlogloss:0.515907
[850]	train-mlogloss:0.375012	test-mlogloss:0.515857
[860]	train-mlogloss:0.373545	test-mlogloss:0.515738
[870]	train-mlogloss:0.372192	test-mlogloss:0.515677
[880]	train-mlogloss:0.370975	test-mlogloss:0.515608
[890]	train-mlogloss:0.369831	test-mlogloss:0.51551
[900]	train-mlogloss:0.368541	test-mlogloss:0.515394
[910]	train-mlogloss:0.367195	test-mlogloss:0.515328
[920]	train-mlogloss:0.365893	test-mlogloss:0.515281
[930]	train-mlogloss:0.364575	test-mlogloss:0.515194
[940]	train-mlogloss:0.363339	test-mlogloss:0.515177
[950]	train-mlogloss:0.362095	test-mlogloss:0.515247
[960]	train-mlogloss:0.360748	test-mlogloss:0.515157
[970]	train-mlogloss:0.359497	test-mlogloss:0.515171
[980]	train-mlogloss:0.358197	test-mlogloss:0.515152
[990]	train-mlogloss:0.356917	test-mlogloss:0.515155
[1000]	train-mlogloss:0.355645	test-mlogloss:0.51515
[1010]	train-mlogloss:0.354457	test-mlogloss:0.515066
[1020]	train-mlogloss:0.353347	test-mlogloss:0.514978
[1030]	train-mlogloss:0.352152	test-mlogloss:0.514936
[1040]	train-mlogloss:0.351011	test-mlogloss:0.514856
[1050]	train-mlogloss:0.34973	test-mlogloss:0.514795
[1060]	train-mlogloss:0.348622	test-mlogloss:0.514759
[1070]	train-mlogloss:0.347416	test-mlogloss:0.514751
[1080]	train-mlogloss:0.34623	test-mlogloss:0.514676
[1090]	train-mlogloss:0.34504	test-mlogloss:0.514641
[1100]	train-mlogloss:0.343701	test-mlogloss:0.514627
[1110]	train-mlogloss:0.342402	test-mlogloss:0.514578
[1120]	train-mlogloss:0.341268	test-mlogloss:0.514619
[1130]	train-mlogloss:0.340115	test-mlogloss:0.5146
[1140]	train-mlogloss:0.338997	test-mlogloss:0.514522
[1150]	train-mlogloss:0.337885	test-mlogloss:0.51445
[1160]	train-mlogloss:0.336836	test-mlogloss:0.514434
[1170]	train-mlogloss:0.33569	test-mlogloss:0.514425
[1180]	train-mlogloss:0.334599	test-mlogloss:0.514457
[1190]	train-mlogloss:0.333493	test-mlogloss:0.514396
[1200]	train-mlogloss:0.332379	test-mlogloss:0.514348
[1210]	train-mlogloss:0.331162	test-mlogloss:0.514339
[1220]	train-mlogloss:0.330025	test-mlogloss:0.51435
[1230]	train-mlogloss:0.328848	test-mlogloss:0.514384
[1240]	train-mlogloss:0.327812	test-mlogloss:0.514358
[1250]	train-mlogloss:0.326785	test-mlogloss:0.51435
Stopping. Best iteration:
[1207]	train-mlogloss:0.331518	test-mlogloss:0.514305

[0.50593702950354136, 0.50092266559477416, 0.51430512114807325]
[0]	train-mlogloss:1.08454	test-mlogloss:1.08487
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.963362	test-mlogloss:0.966917
[20]	train-mlogloss:0.871817	test-mlogloss:0.878514
[30]	train-mlogloss:0.800996	test-mlogloss:0.810687
[40]	train-mlogloss:0.745173	test-mlogloss:0.757635
[50]	train-mlogloss:0.700972	test-mlogloss:0.716051
[60]	train-mlogloss:0.665718	test-mlogloss:0.683207
[70]	train-mlogloss:0.636716	test-mlogloss:0.656573
[80]	train-mlogloss:0.613329	test-mlogloss:0.635439
[90]	train-mlogloss:0.5939	test-mlogloss:0.618173
[100]	train-mlogloss:0.577877	test-mlogloss:0.604058
[110]	train-mlogloss:0.564213	test-mlogloss:0.592412
[120]	train-mlogloss:0.552298	test-mlogloss:0.582626
[130]	train-mlogloss:0.54239	test-mlogloss:0.574552
[140]	train-mlogloss:0.533724	test-mlogloss:0.567784
[150]	train-mlogloss:0.526156	test-mlogloss:0.561937
[160]	train-mlogloss:0.519492	test-mlogloss:0.557001
[170]	train-mlogloss:0.513598	test-mlogloss:0.552741
[180]	train-mlogloss:0.508226	test-mlogloss:0.549135
[190]	train-mlogloss:0.503253	test-mlogloss:0.545784
[200]	train-mlogloss:0.498734	test-mlogloss:0.542902
[210]	train-mlogloss:0.494516	test-mlogloss:0.540387
[220]	train-mlogloss:0.490799	test-mlogloss:0.538191
[230]	train-mlogloss:0.487142	test-mlogloss:0.536157
[240]	train-mlogloss:0.483787	test-mlogloss:0.534411
[250]	train-mlogloss:0.4804	test-mlogloss:0.532628
[260]	train-mlogloss:0.477357	test-mlogloss:0.531129
[270]	train-mlogloss:0.47436	test-mlogloss:0.529846
[280]	train-mlogloss:0.47166	test-mlogloss:0.52858
[290]	train-mlogloss:0.468954	test-mlogloss:0.52744
[300]	train-mlogloss:0.466275	test-mlogloss:0.526446
[310]	train-mlogloss:0.463791	test-mlogloss:0.525551
[320]	train-mlogloss:0.461622	test-mlogloss:0.524678
[330]	train-mlogloss:0.459383	test-mlogloss:0.523966
[340]	train-mlogloss:0.457243	test-mlogloss:0.523327
[350]	train-mlogloss:0.454874	test-mlogloss:0.522563
[360]	train-mlogloss:0.452676	test-mlogloss:0.521838
[370]	train-mlogloss:0.450454	test-mlogloss:0.521186
[380]	train-mlogloss:0.448574	test-mlogloss:0.520612
[390]	train-mlogloss:0.446525	test-mlogloss:0.520098
[400]	train-mlogloss:0.444502	test-mlogloss:0.519571
[410]	train-mlogloss:0.442545	test-mlogloss:0.519118
[420]	train-mlogloss:0.440751	test-mlogloss:0.518763
[430]	train-mlogloss:0.439002	test-mlogloss:0.518345
[440]	train-mlogloss:0.437284	test-mlogloss:0.518005
[450]	train-mlogloss:0.435531	test-mlogloss:0.517629
[460]	train-mlogloss:0.433831	test-mlogloss:0.517209
[470]	train-mlogloss:0.432256	test-mlogloss:0.516877
[480]	train-mlogloss:0.430702	test-mlogloss:0.516607
[490]	train-mlogloss:0.429177	test-mlogloss:0.516334
[500]	train-mlogloss:0.427422	test-mlogloss:0.516085
[510]	train-mlogloss:0.425667	test-mlogloss:0.51587
[520]	train-mlogloss:0.424137	test-mlogloss:0.515622
[530]	train-mlogloss:0.42257	test-mlogloss:0.515442
[540]	train-mlogloss:0.421006	test-mlogloss:0.51519
[550]	train-mlogloss:0.419482	test-mlogloss:0.514936
[560]	train-mlogloss:0.417851	test-mlogloss:0.514725
[570]	train-mlogloss:0.416251	test-mlogloss:0.514521
[580]	train-mlogloss:0.414791	test-mlogloss:0.514327
[590]	train-mlogloss:0.413362	test-mlogloss:0.514121
[600]	train-mlogloss:0.411683	test-mlogloss:0.513959
[610]	train-mlogloss:0.410178	test-mlogloss:0.513726
[620]	train-mlogloss:0.408777	test-mlogloss:0.513524
[630]	train-mlogloss:0.407295	test-mlogloss:0.51335
[640]	train-mlogloss:0.405811	test-mlogloss:0.513233
[650]	train-mlogloss:0.404269	test-mlogloss:0.513088
[660]	train-mlogloss:0.402777	test-mlogloss:0.512917
[670]	train-mlogloss:0.401301	test-mlogloss:0.512829
[680]	train-mlogloss:0.399838	test-mlogloss:0.512664
[690]	train-mlogloss:0.398579	test-mlogloss:0.512562
[700]	train-mlogloss:0.397361	test-mlogloss:0.512454
[710]	train-mlogloss:0.395878	test-mlogloss:0.512356
[720]	train-mlogloss:0.394436	test-mlogloss:0.512242
[730]	train-mlogloss:0.39297	test-mlogloss:0.512118
[740]	train-mlogloss:0.391621	test-mlogloss:0.511922
[750]	train-mlogloss:0.390257	test-mlogloss:0.511863
[760]	train-mlogloss:0.388889	test-mlogloss:0.511735
[770]	train-mlogloss:0.387483	test-mlogloss:0.511724
[780]	train-mlogloss:0.38618	test-mlogloss:0.511589
[790]	train-mlogloss:0.384818	test-mlogloss:0.511538
[800]	train-mlogloss:0.383441	test-mlogloss:0.511432
[810]	train-mlogloss:0.382183	test-mlogloss:0.511376
[820]	train-mlogloss:0.380818	test-mlogloss:0.511218
[830]	train-mlogloss:0.379419	test-mlogloss:0.511042
[840]	train-mlogloss:0.378202	test-mlogloss:0.511054
[850]	train-mlogloss:0.376865	test-mlogloss:0.510952
[860]	train-mlogloss:0.375577	test-mlogloss:0.510829
[870]	train-mlogloss:0.374348	test-mlogloss:0.510773
[880]	train-mlogloss:0.373018	test-mlogloss:0.510752
[890]	train-mlogloss:0.371695	test-mlogloss:0.510701
[900]	train-mlogloss:0.370489	test-mlogloss:0.51069
[910]	train-mlogloss:0.36922	test-mlogloss:0.510573
[920]	train-mlogloss:0.367806	test-mlogloss:0.510503
[930]	train-mlogloss:0.366697	test-mlogloss:0.510338
[940]	train-mlogloss:0.365362	test-mlogloss:0.510252
[950]	train-mlogloss:0.364021	test-mlogloss:0.510108
[960]	train-mlogloss:0.362816	test-mlogloss:0.510064
[970]	train-mlogloss:0.361495	test-mlogloss:0.51013
[980]	train-mlogloss:0.360267	test-mlogloss:0.51002
[990]	train-mlogloss:0.359232	test-mlogloss:0.510018
[1000]	train-mlogloss:0.358071	test-mlogloss:0.510004
[1010]	train-mlogloss:0.35679	test-mlogloss:0.509964
[1020]	train-mlogloss:0.355487	test-mlogloss:0.509963
[1030]	train-mlogloss:0.354268	test-mlogloss:0.509913
[1040]	train-mlogloss:0.353007	test-mlogloss:0.509873
[1050]	train-mlogloss:0.351679	test-mlogloss:0.509732
[1060]	train-mlogloss:0.350332	test-mlogloss:0.509697
[1070]	train-mlogloss:0.349067	test-mlogloss:0.509641
[1080]	train-mlogloss:0.347816	test-mlogloss:0.509628
[1090]	train-mlogloss:0.346688	test-mlogloss:0.50958
[1100]	train-mlogloss:0.345392	test-mlogloss:0.509475
[1110]	train-mlogloss:0.34414	test-mlogloss:0.509456
[1120]	train-mlogloss:0.342959	test-mlogloss:0.509421
[1130]	train-mlogloss:0.341838	test-mlogloss:0.509375
[1140]	train-mlogloss:0.340646	test-mlogloss:0.509398
[1150]	train-mlogloss:0.339491	test-mlogloss:0.509446
[1160]	train-mlogloss:0.338368	test-mlogloss:0.509361
[1170]	train-mlogloss:0.337234	test-mlogloss:0.509361
[1180]	train-mlogloss:0.336155	test-mlogloss:0.5093
[1190]	train-mlogloss:0.335098	test-mlogloss:0.509334
[1200]	train-mlogloss:0.333829	test-mlogloss:0.509274
[1210]	train-mlogloss:0.332686	test-mlogloss:0.50925
[1220]	train-mlogloss:0.331619	test-mlogloss:0.509188
[1230]	train-mlogloss:0.330469	test-mlogloss:0.509095
[1240]	train-mlogloss:0.329323	test-mlogloss:0.509113
[1250]	train-mlogloss:0.328212	test-mlogloss:0.509109
[1260]	train-mlogloss:0.327062	test-mlogloss:0.50907
[1270]	train-mlogloss:0.325915	test-mlogloss:0.50911
[1280]	train-mlogloss:0.324731	test-mlogloss:0.509145
[1290]	train-mlogloss:0.323593	test-mlogloss:0.50913
[1300]	train-mlogloss:0.322538	test-mlogloss:0.509103
[1310]	train-mlogloss:0.321477	test-mlogloss:0.509039
[1320]	train-mlogloss:0.32044	test-mlogloss:0.509092
[1330]	train-mlogloss:0.319377	test-mlogloss:0.509056
[1340]	train-mlogloss:0.318224	test-mlogloss:0.509061
[1350]	train-mlogloss:0.317133	test-mlogloss:0.509046
[1360]	train-mlogloss:0.316067	test-mlogloss:0.509089
Stopping. Best iteration:
[1311]	train-mlogloss:0.321386	test-mlogloss:0.509025

[0.50593702950354136, 0.50092266559477416, 0.51430512114807325, 0.50902529081389458]
[0]	train-mlogloss:1.08405	test-mlogloss:1.08432
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.961148	test-mlogloss:0.964277
[20]	train-mlogloss:0.870388	test-mlogloss:0.876137
[30]	train-mlogloss:0.799929	test-mlogloss:0.808079
[40]	train-mlogloss:0.745274	test-mlogloss:0.755759
[50]	train-mlogloss:0.701664	test-mlogloss:0.714193
[60]	train-mlogloss:0.666117	test-mlogloss:0.680796
[70]	train-mlogloss:0.637518	test-mlogloss:0.654233
[80]	train-mlogloss:0.613946	test-mlogloss:0.632519
[90]	train-mlogloss:0.594514	test-mlogloss:0.615022
[100]	train-mlogloss:0.578386	test-mlogloss:0.600683
[110]	train-mlogloss:0.564983	test-mlogloss:0.588987
[120]	train-mlogloss:0.553199	test-mlogloss:0.57908
[130]	train-mlogloss:0.543236	test-mlogloss:0.570872
[140]	train-mlogloss:0.534648	test-mlogloss:0.563973
[150]	train-mlogloss:0.527059	test-mlogloss:0.558108
[160]	train-mlogloss:0.520401	test-mlogloss:0.553104
[170]	train-mlogloss:0.51441	test-mlogloss:0.548745
[180]	train-mlogloss:0.508854	test-mlogloss:0.54499
[190]	train-mlogloss:0.503967	test-mlogloss:0.541846
[200]	train-mlogloss:0.499391	test-mlogloss:0.538993
[210]	train-mlogloss:0.495349	test-mlogloss:0.536484
[220]	train-mlogloss:0.491474	test-mlogloss:0.534225
[230]	train-mlogloss:0.487973	test-mlogloss:0.532259
[240]	train-mlogloss:0.484443	test-mlogloss:0.530472
[250]	train-mlogloss:0.481059	test-mlogloss:0.528791
[260]	train-mlogloss:0.478067	test-mlogloss:0.527331
[270]	train-mlogloss:0.47528	test-mlogloss:0.525971
[280]	train-mlogloss:0.472403	test-mlogloss:0.524694
[290]	train-mlogloss:0.469659	test-mlogloss:0.52362
[300]	train-mlogloss:0.467	test-mlogloss:0.522562
[310]	train-mlogloss:0.464453	test-mlogloss:0.521599
[320]	train-mlogloss:0.461996	test-mlogloss:0.520706
[330]	train-mlogloss:0.459635	test-mlogloss:0.519778
[340]	train-mlogloss:0.457318	test-mlogloss:0.518949
[350]	train-mlogloss:0.455159	test-mlogloss:0.518217
[360]	train-mlogloss:0.452899	test-mlogloss:0.517581
[370]	train-mlogloss:0.450727	test-mlogloss:0.516949
[380]	train-mlogloss:0.4485	test-mlogloss:0.51638
[390]	train-mlogloss:0.446427	test-mlogloss:0.515797
[400]	train-mlogloss:0.4444	test-mlogloss:0.515357
[410]	train-mlogloss:0.442599	test-mlogloss:0.514848
[420]	train-mlogloss:0.440708	test-mlogloss:0.514396
[430]	train-mlogloss:0.438908	test-mlogloss:0.513933
[440]	train-mlogloss:0.437118	test-mlogloss:0.513568
[450]	train-mlogloss:0.435411	test-mlogloss:0.513185
[460]	train-mlogloss:0.433461	test-mlogloss:0.512785
[470]	train-mlogloss:0.431748	test-mlogloss:0.512469
[480]	train-mlogloss:0.430045	test-mlogloss:0.512195
[490]	train-mlogloss:0.428418	test-mlogloss:0.511887
[500]	train-mlogloss:0.426728	test-mlogloss:0.511558
[510]	train-mlogloss:0.425126	test-mlogloss:0.51113
[520]	train-mlogloss:0.423411	test-mlogloss:0.510837
[530]	train-mlogloss:0.42174	test-mlogloss:0.510637
[540]	train-mlogloss:0.420415	test-mlogloss:0.510376
[550]	train-mlogloss:0.418793	test-mlogloss:0.510196
[560]	train-mlogloss:0.417318	test-mlogloss:0.510013
[570]	train-mlogloss:0.415622	test-mlogloss:0.509769
[580]	train-mlogloss:0.414179	test-mlogloss:0.509632
[590]	train-mlogloss:0.412664	test-mlogloss:0.509426
[600]	train-mlogloss:0.411249	test-mlogloss:0.509284
[610]	train-mlogloss:0.409839	test-mlogloss:0.509089
[620]	train-mlogloss:0.408413	test-mlogloss:0.508921
[630]	train-mlogloss:0.406751	test-mlogloss:0.508711
[640]	train-mlogloss:0.405029	test-mlogloss:0.508519
[650]	train-mlogloss:0.403562	test-mlogloss:0.508368
[660]	train-mlogloss:0.402038	test-mlogloss:0.508143
[670]	train-mlogloss:0.400452	test-mlogloss:0.508012
[680]	train-mlogloss:0.398977	test-mlogloss:0.507924
[690]	train-mlogloss:0.397624	test-mlogloss:0.507816
[700]	train-mlogloss:0.396184	test-mlogloss:0.50768
[710]	train-mlogloss:0.394663	test-mlogloss:0.507574
[720]	train-mlogloss:0.393239	test-mlogloss:0.507513
[730]	train-mlogloss:0.39196	test-mlogloss:0.50738
[740]	train-mlogloss:0.390526	test-mlogloss:0.507355
[750]	train-mlogloss:0.389126	test-mlogloss:0.507252
[760]	train-mlogloss:0.387768	test-mlogloss:0.507015
[770]	train-mlogloss:0.386351	test-mlogloss:0.506904
[780]	train-mlogloss:0.384966	test-mlogloss:0.506823
[790]	train-mlogloss:0.383476	test-mlogloss:0.506765
[800]	train-mlogloss:0.382162	test-mlogloss:0.506666
[810]	train-mlogloss:0.380807	test-mlogloss:0.506575
[820]	train-mlogloss:0.379504	test-mlogloss:0.506471
[830]	train-mlogloss:0.378234	test-mlogloss:0.506423
[840]	train-mlogloss:0.376869	test-mlogloss:0.506346
[850]	train-mlogloss:0.375649	test-mlogloss:0.506169
[860]	train-mlogloss:0.374279	test-mlogloss:0.506064
[870]	train-mlogloss:0.373012	test-mlogloss:0.505987
[880]	train-mlogloss:0.371773	test-mlogloss:0.505902
[890]	train-mlogloss:0.37053	test-mlogloss:0.505824
[900]	train-mlogloss:0.369293	test-mlogloss:0.505776
[910]	train-mlogloss:0.36807	test-mlogloss:0.505698
[920]	train-mlogloss:0.366628	test-mlogloss:0.505693
[930]	train-mlogloss:0.365295	test-mlogloss:0.505618
[940]	train-mlogloss:0.364057	test-mlogloss:0.505495
[950]	train-mlogloss:0.362753	test-mlogloss:0.50545
[960]	train-mlogloss:0.361483	test-mlogloss:0.505398
[970]	train-mlogloss:0.36021	test-mlogloss:0.505307
[980]	train-mlogloss:0.358903	test-mlogloss:0.505296
[990]	train-mlogloss:0.357645	test-mlogloss:0.50522
[1000]	train-mlogloss:0.356376	test-mlogloss:0.505091
[1010]	train-mlogloss:0.355204	test-mlogloss:0.505102
[1020]	train-mlogloss:0.354033	test-mlogloss:0.505038
[1030]	train-mlogloss:0.352771	test-mlogloss:0.505047
[1040]	train-mlogloss:0.351625	test-mlogloss:0.504996
[1050]	train-mlogloss:0.35047	test-mlogloss:0.504953
[1060]	train-mlogloss:0.349202	test-mlogloss:0.504896
[1070]	train-mlogloss:0.347961	test-mlogloss:0.504809
[1080]	train-mlogloss:0.34674	test-mlogloss:0.504774
[1090]	train-mlogloss:0.345405	test-mlogloss:0.504734
[1100]	train-mlogloss:0.344086	test-mlogloss:0.504713
[1110]	train-mlogloss:0.343056	test-mlogloss:0.504675
[1120]	train-mlogloss:0.341795	test-mlogloss:0.504682
[1130]	train-mlogloss:0.340678	test-mlogloss:0.504642
[1140]	train-mlogloss:0.33961	test-mlogloss:0.504606
[1150]	train-mlogloss:0.33834	test-mlogloss:0.504601
[1160]	train-mlogloss:0.337054	test-mlogloss:0.504505
[1170]	train-mlogloss:0.335963	test-mlogloss:0.504455
[1180]	train-mlogloss:0.334808	test-mlogloss:0.504395
[1190]	train-mlogloss:0.333714	test-mlogloss:0.504358
[1200]	train-mlogloss:0.332564	test-mlogloss:0.504277
[1210]	train-mlogloss:0.331427	test-mlogloss:0.504271
[1220]	train-mlogloss:0.330265	test-mlogloss:0.504225
[1230]	train-mlogloss:0.329095	test-mlogloss:0.504201
[1240]	train-mlogloss:0.328079	test-mlogloss:0.504183
[1250]	train-mlogloss:0.326966	test-mlogloss:0.504196
[1260]	train-mlogloss:0.325917	test-mlogloss:0.504156
[1270]	train-mlogloss:0.324883	test-mlogloss:0.504157
[1280]	train-mlogloss:0.323703	test-mlogloss:0.50413
[1290]	train-mlogloss:0.322628	test-mlogloss:0.504104
[1300]	train-mlogloss:0.32155	test-mlogloss:0.504137
[1310]	train-mlogloss:0.320482	test-mlogloss:0.504109
[1320]	train-mlogloss:0.319325	test-mlogloss:0.504116
[1330]	train-mlogloss:0.318167	test-mlogloss:0.50412
[1340]	train-mlogloss:0.317055	test-mlogloss:0.50409
[1350]	train-mlogloss:0.315926	test-mlogloss:0.50407
[1360]	train-mlogloss:0.314961	test-mlogloss:0.504101
[1370]	train-mlogloss:0.313805	test-mlogloss:0.504064
[1380]	train-mlogloss:0.312716	test-mlogloss:0.504014
[1390]	train-mlogloss:0.311662	test-mlogloss:0.504086
[1400]	train-mlogloss:0.310658	test-mlogloss:0.504106
[1410]	train-mlogloss:0.309635	test-mlogloss:0.504036
[1420]	train-mlogloss:0.308494	test-mlogloss:0.504061
Stopping. Best iteration:
[1379]	train-mlogloss:0.312806	test-mlogloss:0.504009

[0.50593702950354136, 0.50092266559477416, 0.51430512114807325, 0.50902529081389458, 0.50400881779857165]
0.50683995706

In [14]:
dfs3 = run3_to_stackdf(rv3)
pickle.dump(dfs3, open('modeloutput-xgb-clf.pkl', 'wb'))
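
# Sketch (mine) of how a later stacking step might reload this artifact:
# dfs3 unpacks to the combined OOF+test probability frame (indexed by
# listing_id) plus the list of per-fold test-prediction frames.
df_allpreds3, per_fold_test = pickle.load(open('modeloutput-xgb-clf.pkl', 'rb'))
df_allpreds3[['low', 'medium', 'high']].head()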

In [16]:
def run_to_stackdf(run):
    # regression analogue of run3_to_stackdf: one 'level' column instead of three
    df_testpreds = pd.DataFrame(run[2].mean(axis=0))
    df_testpreds.columns = ['level']
    df_testpreds['listing_id'] = cv_test[0].listing_id.values
    df_allpreds = pd.concat([run[1][['level', 'listing_id']], df_testpreds])

    df_allpreds.sort_values('listing_id', inplace=True)
    df_allpreds.set_index('listing_id', inplace=True)

    df_fold = []
    for f in range(run[2].shape[0]):
        df_fold.append(pd.DataFrame(run[2][f]))
        df_fold[-1]['listing_id'] = test_df.listing_id.values
        df_fold[-1].sort_values('listing_id', inplace=True)
        df_fold[-1].set_index('listing_id', inplace=True)

    return (df_allpreds, df_fold)

In [17]:
def runXGB1(train_X, train_y, test_X, test_y=None, feature_names=None, seed_val=0, num_rounds=4000):
    param = {}
    param['objective'] = 'reg:logistic'
    #param['tree_method'] = 'hist'
    param['eta'] = 0.02
    param['max_depth'] = 6
    param['silent'] = 1
    param['num_class'] = 1
    param['eval_metric'] = "rmse"
    param['min_child_weight'] = 1
    param['subsample'] = 0.7
    param['colsample_bytree'] = 0.7
    param['seed'] = seed_val
    param['base_score'] = train_y.mean()

    plst = list(param.items())
    xgtrain = xgb.DMatrix(train_X, label=train_y)

    if test_y is not None:
        xgtest = xgb.DMatrix(test_X, label=test_y)
        watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
        model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=50, verbose_eval=10)
    else:
        xgtest = xgb.DMatrix(test_X)
        model = xgb.train(plst, xgtrain, num_rounds)

    pred_test_y = model.predict(xgtest, ntree_limit=model.best_ntree_limit)
    return pred_test_y, model

In [18]:
medium_regression_tgt = (.5 + (9/13)) / 2
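# Collapses the three classes onto one regression target: low -> 0, high -> 1,
# and medium -> (.5 + 9/13) / 2, roughly 0.596 (see the mapping in run_cv1 below).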

def run_cv1(train_df, cv_test, kf, features_to_use):
    
    train_X = train_df[features_to_use] #sparse.hstack([train_df[features_to_use], tr_sparse]).tocsr()
    train_y3 = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))
    
    train_y = np.zeros_like(train_y3, dtype=np.float32)
    train_y[train_y3 == 1] = medium_regression_tgt
    train_y[train_y3 == 2] = 1

    cv_preds = []
    cv_scores = []
    models = []
    test_preds = []
    
    fold = 0

    for dev_index, val_index in kf.split(range(train_X.shape[0]), train_y):

        dev_X, val_X = train_X.iloc[dev_index], train_X.iloc[val_index]
        dev_y, val_y = train_y[dev_index], train_y[val_index]
        preds, model = runXGB1(dev_X, dev_y, val_X, val_y)
        models.append(model)

        cv_scores.append(model.best_score)
        print(cv_scores)

        cut_df = train_df.iloc[val_index]
        
        out_df = pd.DataFrame(preds)
        out_df.columns = ["level"]
        out_df["listing_id"] = cut_df.listing_id.values
        out_df['interest_tgt'] = val_y # cut_df.interest.values

        cv_preds.append(out_df)

        # each fold's cv_test copy carries that fold's group encodings
        xgtest = xgb.DMatrix(cv_test[fold][features_to_use])
        test_preds.append(model.predict(xgtest, ntree_limit=model.best_ntree_limit))

        fold += 1

    df_cv = pd.concat(cv_preds)
    print(np.sqrt(sklearn.metrics.mean_squared_error(df_cv.interest_tgt, df_cv.level)))
    
    apreds = np.array(test_preds)
    
    return models, df_cv, apreds

In [19]:
rv1 = run_cv1(train_df, cv_test, kf, fl)


[0]	train-rmse:0.334483	test-rmse:0.334523
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.313981	test-rmse:0.314755
[20]	train-rmse:0.298413	test-rmse:0.299804
[30]	train-rmse:0.286126	test-rmse:0.288265
[40]	train-rmse:0.27618	test-rmse:0.278983
[50]	train-rmse:0.268765	test-rmse:0.272163
[60]	train-rmse:0.263497	test-rmse:0.267494
[70]	train-rmse:0.258939	test-rmse:0.263398
[80]	train-rmse:0.255294	test-rmse:0.260178
[90]	train-rmse:0.252345	test-rmse:0.257657
[100]	train-rmse:0.249963	test-rmse:0.255729
[110]	train-rmse:0.247915	test-rmse:0.254029
[120]	train-rmse:0.246146	test-rmse:0.252676
[130]	train-rmse:0.244603	test-rmse:0.251512
[140]	train-rmse:0.243268	test-rmse:0.250504
[150]	train-rmse:0.24198	test-rmse:0.249626
[160]	train-rmse:0.240862	test-rmse:0.248845
[170]	train-rmse:0.239945	test-rmse:0.24829
[180]	train-rmse:0.239108	test-rmse:0.247768
[190]	train-rmse:0.238226	test-rmse:0.247241
[200]	train-rmse:0.237444	test-rmse:0.246773
[210]	train-rmse:0.236599	test-rmse:0.246365
[220]	train-rmse:0.235836	test-rmse:0.245984
[230]	train-rmse:0.235163	test-rmse:0.245642
[240]	train-rmse:0.234549	test-rmse:0.245253
[250]	train-rmse:0.234051	test-rmse:0.244997
[260]	train-rmse:0.233433	test-rmse:0.244676
[270]	train-rmse:0.232805	test-rmse:0.244348
[280]	train-rmse:0.23223	test-rmse:0.244067
[290]	train-rmse:0.231591	test-rmse:0.243783
[300]	train-rmse:0.231055	test-rmse:0.243615
[310]	train-rmse:0.230575	test-rmse:0.243391
[320]	train-rmse:0.230193	test-rmse:0.24325
[330]	train-rmse:0.229752	test-rmse:0.243115
[340]	train-rmse:0.229248	test-rmse:0.242932
[350]	train-rmse:0.22862	test-rmse:0.242735
[360]	train-rmse:0.228082	test-rmse:0.242579
[370]	train-rmse:0.227556	test-rmse:0.242427
[380]	train-rmse:0.227041	test-rmse:0.242281
[390]	train-rmse:0.226469	test-rmse:0.242152
[400]	train-rmse:0.226004	test-rmse:0.242029
[410]	train-rmse:0.225458	test-rmse:0.241906
[420]	train-rmse:0.225053	test-rmse:0.24179
[430]	train-rmse:0.2247	test-rmse:0.241691
[440]	train-rmse:0.224174	test-rmse:0.241576
[450]	train-rmse:0.223844	test-rmse:0.241462
[460]	train-rmse:0.223416	test-rmse:0.241332
[470]	train-rmse:0.222986	test-rmse:0.241247
[480]	train-rmse:0.22256	test-rmse:0.241138
[490]	train-rmse:0.222141	test-rmse:0.241033
[500]	train-rmse:0.221695	test-rmse:0.240937
[510]	train-rmse:0.221309	test-rmse:0.240904
[520]	train-rmse:0.220911	test-rmse:0.240853
[530]	train-rmse:0.220537	test-rmse:0.240776
[540]	train-rmse:0.220113	test-rmse:0.240683
[550]	train-rmse:0.219797	test-rmse:0.240613
[560]	train-rmse:0.219423	test-rmse:0.240556
[570]	train-rmse:0.219002	test-rmse:0.240497
[580]	train-rmse:0.218586	test-rmse:0.240518
[590]	train-rmse:0.218216	test-rmse:0.240468
[600]	train-rmse:0.217754	test-rmse:0.24044
[610]	train-rmse:0.2174	test-rmse:0.240383
[620]	train-rmse:0.217061	test-rmse:0.240368
[630]	train-rmse:0.216666	test-rmse:0.240329
[640]	train-rmse:0.21609	test-rmse:0.240249
[650]	train-rmse:0.215752	test-rmse:0.240199
[660]	train-rmse:0.215344	test-rmse:0.240116
[670]	train-rmse:0.214947	test-rmse:0.240081
[680]	train-rmse:0.214533	test-rmse:0.240076
[690]	train-rmse:0.214053	test-rmse:0.240014
[700]	train-rmse:0.213651	test-rmse:0.239954
[710]	train-rmse:0.213213	test-rmse:0.239939
[720]	train-rmse:0.212875	test-rmse:0.239904
[730]	train-rmse:0.212428	test-rmse:0.239866
[740]	train-rmse:0.212036	test-rmse:0.239798
[750]	train-rmse:0.21167	test-rmse:0.239784
[760]	train-rmse:0.211328	test-rmse:0.239752
[770]	train-rmse:0.210822	test-rmse:0.239657
[780]	train-rmse:0.210405	test-rmse:0.239635
[790]	train-rmse:0.210003	test-rmse:0.23961
[800]	train-rmse:0.209626	test-rmse:0.239582
[810]	train-rmse:0.209218	test-rmse:0.239522
[820]	train-rmse:0.208853	test-rmse:0.239451
[830]	train-rmse:0.208481	test-rmse:0.239438
[840]	train-rmse:0.208112	test-rmse:0.239438
[850]	train-rmse:0.20768	test-rmse:0.239424
[860]	train-rmse:0.20721	test-rmse:0.239371
[870]	train-rmse:0.206819	test-rmse:0.239336
[880]	train-rmse:0.206463	test-rmse:0.239331
[890]	train-rmse:0.206086	test-rmse:0.239296
[900]	train-rmse:0.20567	test-rmse:0.239274
[910]	train-rmse:0.205222	test-rmse:0.239273
[920]	train-rmse:0.204798	test-rmse:0.239228
[930]	train-rmse:0.204459	test-rmse:0.239177
[940]	train-rmse:0.204056	test-rmse:0.239163
[950]	train-rmse:0.203791	test-rmse:0.239151
[960]	train-rmse:0.20341	test-rmse:0.239139
[970]	train-rmse:0.203068	test-rmse:0.23913
[980]	train-rmse:0.202755	test-rmse:0.239128
[990]	train-rmse:0.202401	test-rmse:0.239134
[1000]	train-rmse:0.202022	test-rmse:0.239099
[1010]	train-rmse:0.201671	test-rmse:0.239077
[1020]	train-rmse:0.201297	test-rmse:0.239042
[1030]	train-rmse:0.200882	test-rmse:0.239009
[1040]	train-rmse:0.200538	test-rmse:0.238979
[1050]	train-rmse:0.20023	test-rmse:0.238976
[1060]	train-rmse:0.199887	test-rmse:0.238969
[1070]	train-rmse:0.199542	test-rmse:0.238966
[1080]	train-rmse:0.199245	test-rmse:0.238951
[1090]	train-rmse:0.198883	test-rmse:0.238949
[1100]	train-rmse:0.198543	test-rmse:0.238929
[1110]	train-rmse:0.198145	test-rmse:0.238901
[1120]	train-rmse:0.197799	test-rmse:0.23889
[1130]	train-rmse:0.197478	test-rmse:0.238891
[1140]	train-rmse:0.197172	test-rmse:0.238882
[1150]	train-rmse:0.196827	test-rmse:0.238867
[1160]	train-rmse:0.196511	test-rmse:0.238845
[1170]	train-rmse:0.196203	test-rmse:0.238844
[1180]	train-rmse:0.195804	test-rmse:0.238806
[1190]	train-rmse:0.195452	test-rmse:0.238804
[1200]	train-rmse:0.195128	test-rmse:0.238787
[1210]	train-rmse:0.194898	test-rmse:0.23877
[1220]	train-rmse:0.19462	test-rmse:0.23877
[1230]	train-rmse:0.19425	test-rmse:0.23876
[1240]	train-rmse:0.193837	test-rmse:0.238764
[1250]	train-rmse:0.193554	test-rmse:0.23875
[1260]	train-rmse:0.193216	test-rmse:0.238734
[1270]	train-rmse:0.192863	test-rmse:0.238713
[1280]	train-rmse:0.192541	test-rmse:0.238686
[1290]	train-rmse:0.19224	test-rmse:0.238706
[1300]	train-rmse:0.191902	test-rmse:0.238699
[1310]	train-rmse:0.191547	test-rmse:0.238687
[1320]	train-rmse:0.191207	test-rmse:0.238699
[1330]	train-rmse:0.190867	test-rmse:0.238674
[1340]	train-rmse:0.190543	test-rmse:0.238697
[1350]	train-rmse:0.19021	test-rmse:0.238722
Stopping. Best iteration:
[1306]	train-rmse:0.191675	test-rmse:0.23867

[0.23867]
[0]	train-rmse:0.334444	test-rmse:0.334482
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.314009	test-rmse:0.314302
[20]	train-rmse:0.29865	test-rmse:0.299383
[30]	train-rmse:0.286515	test-rmse:0.287706
[40]	train-rmse:0.276692	test-rmse:0.278343
[200]	train-rmse:0.237834	test-rmse:0.245912
[400]	train-rmse:0.226683	test-rmse:0.241793
[600]	train-rmse:0.218209	test-rmse:0.240479
[800]	train-rmse:0.210018	test-rmse:0.239612
[1000]	train-rmse:0.202564	test-rmse:0.239097
[1200]	train-rmse:0.195396	test-rmse:0.238659
[1280]	train-rmse:0.192619	test-rmse:0.238563
Stopping. Best iteration:
[1236]	train-rmse:0.194106	test-rmse:0.238535

[0.23867, 0.238535]
[0]	train-rmse:0.334355	test-rmse:0.334517
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[200]	train-rmse:0.236738	test-rmse:0.250877
[400]	train-rmse:0.225914	test-rmse:0.246563
[600]	train-rmse:0.217291	test-rmse:0.244819
[800]	train-rmse:0.209366	test-rmse:0.243879
[1000]	train-rmse:0.202052	test-rmse:0.243415
[1200]	train-rmse:0.194914	test-rmse:0.242942
[1270]	train-rmse:0.192551	test-rmse:0.242917
Stopping. Best iteration:
[1220]	train-rmse:0.19432	test-rmse:0.242886

[0.23867, 0.238535, 0.242886]
[0]	train-rmse:0.334393	test-rmse:0.334486
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[200]	train-rmse:0.237011	test-rmse:0.248346
[400]	train-rmse:0.226063	test-rmse:0.243683
[600]	train-rmse:0.217666	test-rmse:0.242163
[800]	train-rmse:0.209954	test-rmse:0.241235
[1000]	train-rmse:0.202917	test-rmse:0.240626
[1200]	train-rmse:0.195645	test-rmse:0.240395
[1400]	train-rmse:0.189083	test-rmse:0.240164
[1570]	train-rmse:0.1838	test-rmse:0.240083
Stopping. Best iteration:
[1520]	train-rmse:0.185196	test-rmse:0.240068

[0.23867, 0.238535, 0.242886, 0.240068]
[0]	train-rmse:0.334012	test-rmse:0.334011
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[200]	train-rmse:0.237562	test-rmse:0.246288
[400]	train-rmse:0.226335	test-rmse:0.241688
[600]	train-rmse:0.217289	test-rmse:0.24003
[800]	train-rmse:0.209606	test-rmse:0.23927
[1000]	train-rmse:0.202303	test-rmse:0.238806
[1200]	train-rmse:0.195022	test-rmse:0.238494
[1400]	train-rmse:0.188471	test-rmse:0.238237
[1470]	train-rmse:0.186307	test-rmse:0.238256
Stopping. Best iteration:
[1425]	train-rmse:0.18776	test-rmse:0.238196

[0.23867, 0.238535, 0.242886, 0.240068, 0.238196]
0.239677
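
The log above is standard xgboost watchlist output: periodic eval lines, an early-stopping notice, and the best iteration once test-rmse has gone 50 rounds without improving. The running list accumulates each fold's best test-rmse, and the closing 0.239677 appears to be the cross-fold average. Below is a minimal sketch of a per-fold loop that produces this kind of output; folds, params, train_X, and the round cap are placeholders standing in for the notebook's actual CV setup, not its real code:

import numpy as np
import xgboost as xgb

scores = []
for train_index, test_index in folds:
    dtrain = xgb.DMatrix(train_X[train_index], label=train_y[train_index])
    dvalid = xgb.DMatrix(train_X[test_index], label=train_y[test_index])

    # The "Multiple eval metrics have been passed" notice comes from the
    # two-entry watchlist; early stopping then tracks the last one, test-rmse.
    model = xgb.train(params, dtrain,
                      num_boost_round=4000,          # upper bound; early stopping ends training first
                      evals=[(dtrain, 'train'), (dvalid, 'test')],
                      early_stopping_rounds=50,
                      verbose_eval=10)               # one eval line every 10 rounds

    scores.append(model.best_score)                  # best test-rmse, reached at model.best_iteration
    print(scores)                                    # the running per-fold list seen above

print(np.mean(scores))                               # cross-fold average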

In [20]:
# collect the per-fold predictions into stacker-ready frames and save them
# for the ensembling stage
dfs1 = run_to_stackdf(rv1)
with open('modeloutput-xgb-reg.pkl', 'wb') as f:
    pickle.dump(dfs1, f)
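
run_to_stackdf and rv1 come from earlier in the notebook; rv1 presumably carries the per-fold predictions of the run above. As a rough sketch of what such a helper assembles, assuming each fold contributes a (validation frame, test frame) pair carrying a prediction column; the names and layout here are illustrative, not the notebook's actual implementation:

import pandas as pd

def run_to_stackdf_sketch(rv):
    # Out-of-fold predictions: each training row is predicted by exactly one
    # fold, so the per-fold validation frames concatenate into a single
    # train-level frame for the stacker.
    oof_df = pd.concat([valid_df for valid_df, _ in rv]).sort_index()

    # Test predictions: every fold scores the full test set, so average the
    # per-fold prediction columns into one test-level frame.
    test_df = sum(t[['prediction']] for _, t in rv) / len(rv)

    return oof_df, test_df

Pickling the result keeps its indexing intact, so a later stacking step can merge this model's output with the other first-level models by listing_id.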
