In [1]:
import os
import sys
import operator
import numpy as np
import pandas as pd
from scipy import sparse
import xgboost as xgb
import random
from sklearn import model_selection, preprocessing, ensemble
from sklearn.metrics import log_loss
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer

import pickle

import sklearn.cluster

import Levenshtein

from multiprocessing import Pool

In [2]:
# Load the train/test frames and feature list produced by the data-prep
# notebook (fin-dprep-*), plus the per-listing price-model output.
train_df = pd.read_pickle('fin-dprep-train.pkl')
test_df = pd.read_pickle('fin-dprep-test.pkl')

# NOTE(review): pickle is only safe on trusted local files; fine here.
features_to_use = pickle.load(open('fin-dprep-flist.pkl', 'rb'))

medium_price = pd.read_pickle('fin-medium-price.pkl')

# Attach price-model columns; medium_price is indexed by listing_id.
train_df = pd.merge(train_df, medium_price, left_on='listing_id', right_index=True)
test_df = pd.merge(test_df, medium_price, left_on='listing_id', right_index=True)

In [3]:
# External "adams" feature set (see adams_features below for column names),
# indexed by listing_id.
adams = pd.read_pickle('features-adams.pkl')

train_df = pd.merge(train_df, adams, left_on='listing_id', right_index=True)
test_df = pd.merge(test_df, adams, left_on='listing_id', right_index=True)

In [4]:
# Log-difference between actual and model-predicted price: positive means the
# listing is priced above what the price model expects.
for _df in (train_df, test_df):
    _df["predicted_price_diff"] = np.log(_df["price"]) - np.log(_df["predicted_price"])

In [5]:
class MeansProcessor:
    """Out-of-fold target-statistics encoder.

    Fit on a training frame, then ``predict`` stamps the per-group mean,
    standard deviation and row count of the target column onto another
    frame, keyed by ``key`` (one column name or a list of column names).
    Used here for CV-safe manager/building "level" features.
    """

    def __init__(self, key, outkey=None, tgt='interest_cat'):
        self.key = key
        # Output column prefix defaults to the key column name itself.
        self.outkey = key if outkey is None else outkey

        self.count = {}        # group key -> number of training rows
        self.means = {}        # group key -> mean of target
        self.std = {}          # group key -> std of target (numpy default ddof=0)
        self.global_means = 0  # overall target mean (not applied in predict; see comment there)

        self.tgt = tgt

        self.outkeys = [self.outkey + '_level', self.outkey + '_level_std']

    def fit(self, df):
        """Record per-group statistics of df[self.tgt] grouped by self.key."""
        self.global_means = df[self.tgt].mean()

        for group_key, group in df.groupby(self.key, sort=False):

            self.count[group_key] = len(group)

            # BUGFIX: the original guard was `len(k[1]) < 0`, which can never
            # be true.  Corrected to == 0; groupby only yields non-empty
            # groups, so this remains a pure safeguard and behavior is
            # unchanged.
            if len(group) == 0:
                self.means[group_key] = np.nan
                self.std[group_key] = np.nan
            else:
                self.means[group_key] = np.mean(group[self.tgt])
                self.std[group_key] = np.std(group[self.tgt])

    def predict(self, df):
        """Attach <outkey>_level / _level_std / _count columns to df (mutated
        in place) and return it.  Groups unseen in fit stay NaN with count 0."""
        for col in self.outkeys:
            df[col] = np.nan  # could fall back to self.global_means instead

        df[self.outkey + '_count'] = 0

        for group_key, group in df.groupby(self.key, sort=False):
            # Key value 0 is skipped — presumably a missing/placeholder id;
            # TODO(review) confirm against the data-prep step.
            if group_key == 0:
                continue

            if group_key in self.means:
                df.loc[group.index, self.outkey + '_count'] = self.count[group_key]
                df.loc[group.index, self.outkey + '_level'] = self.means[group_key]
                df.loc[group.index, self.outkey + '_level_std'] = self.std[group_key]

        return df

    def get_features(self):
        """Names of the columns predict() adds."""
        return self.outkeys.copy() + [self.outkey + '_count']

# i kept the same index randomization (with fixed seed) so I could validate this code against
# the original...

# Ordinal class ids for the three interest levels (XGBoost label encoding).
target_num_map = {'low':0, 'medium':1, 'high':2}
train_y = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

def proc_fold(fold):
    """Build leakage-free group features for one CV fold.

    fold is a (train_index, test_index) pair.  Fits the three group-level
    encoders on the fold's training rows only, then applies them to the
    fold's validation rows and to a fresh copy of the test set.
    Returns (cv_train, cv_valid, cv_test).
    """
    dev_idx, val_idx = fold

    cv_train = train_df.iloc[dev_idx]
    cv_valid = train_df.iloc[val_idx][['interest_level', 'manager_id', 'building_id']]
    cv_test = test_df.copy()

    # (key columns, output prefix) for each encoder.
    encoder_specs = [
        ('building_id', 'building_sort'),
        ('manager_id', 'manager_sort'),
        (['building_id', 'manager_id'], 'mb_comb'),
    ]

    for key, outkey in encoder_specs:
        encoder = MeansProcessor(key, outkey)
        encoder.fit(cv_train)
        cv_valid = encoder.predict(cv_valid)
        cv_test = encoder.predict(cv_test)

    return cv_train, cv_valid, cv_test

# Stratified 5-fold split with a fixed seed so fold membership matches the
# original experiments.
kf = model_selection.StratifiedKFold(n_splits=5, shuffle=True, random_state=2016)
folds = [(k[0], k[1]) for k in kf.split(list(range(train_df.shape[0])), train_y)]

import pickle

# Cache the expensive per-fold group-feature computation.  BUGFIX: the
# original used a bare `except:`, which silently swallows *any* error
# (including KeyboardInterrupt); only failures from a missing or corrupt
# cache file should trigger recomputation.
try:
    with open('0420-model-groupfeatures.pkl', 'rb') as cache_in:
        rv = pickle.load(cache_in)
except (OSError, EOFError, pickle.UnpicklingError):
    with Pool(5) as pool:
        rv = pool.map(proc_fold, folds)

    with open('0420-model-groupfeatures.pkl', 'wb') as cache_out:
        pickle.dump(rv, cache_out)

# Dummy processors, used only to recover the generated feature column names.
m_build = MeansProcessor('building_id', 'building_sort')
m_mgr = MeansProcessor('manager_id', 'manager_sort')
m_comb = MeansProcessor(['building_id', 'manager_id'], 'mb_comb')

group_features = m_build.get_features() + m_mgr.get_features() + m_comb.get_features()

# Per-fold copies of the test set with that fold's group features attached.
cv_test = []
for r in rv:
    cv_test.append(test_df.merge(r[2][group_features], left_index=True, right_index=True))

# Out-of-fold validation predictions cover every training row exactly once.
cv_allvalid = pd.concat([r[1] for r in rv])

train_df = train_df.merge(cv_allvalid[group_features], left_index=True, right_index=True)

In [6]:
# Record the listing_ids belonging to each fold's train/validation split,
# for later alignment when stacking.
train_ids = []
val_ids = []

for dev_idx, val_idx in kf.split(range(train_df.shape[0]), train_df.interest_cat):
    train_ids.append(train_df.iloc[dev_idx].listing_id.values)
    val_ids.append(train_df.iloc[val_idx].listing_id.values)

In [7]:
# Column names from the external "adams" feature frame merged in above:
# rotated/polar coordinates plus description-text statistics.
adams_features = ['num_rot15_X', 'num_rot15_Y', 'num_rot30_X', 'num_rot30_Y', 'num_rot45_X', 'num_rot45_Y', 'num_rot60_X', 'num_rot60_Y', 'num_rho', 'num_phi', 'num_cap_share', 'num_nr_of_lines', 'num_redacted', 'num_email', 'num_phone_nr']

In [8]:
# Final model feature list: base prep features, CV-safe group statistics,
# the adams feature set, plus price-model and manager/density extras.
fl = features_to_use.copy() + group_features + adams_features.copy()

fl.extend([
    'predicted_price',
    'predicted_price_diff',
    'manager_lazy_rate',
    'density_exp01',
])

In [9]:
def run3_to_stackdf(run):
    """Reshape a run_cv() result triple into stacking-ready frames.

    run[1] holds out-of-fold train predictions (low/medium/high +
    listing_id); run[2] is the per-fold array of test-set predictions.

    Returns (df_allpreds3, df_fold):
      df_allpreds3 - train OOF preds plus fold-averaged test preds,
                     indexed by listing_id.
      df_fold      - list of per-fold test prediction frames indexed by
                     listing_id (columns left as 0/1/2, not renamed).
    """
    
    # Average the test predictions across folds.
    df_testpreds3 = pd.DataFrame(run[2].mean(axis=0))
    df_testpreds3.columns = ['low', 'medium', 'high']
    # NOTE(review): this assigns by index alignment — assumes test_df keeps
    # the default positional index; confirm test_df was never re-indexed.
    df_testpreds3['listing_id'] = test_df.listing_id

    df_allpreds3 = pd.concat([run[1][['low', 'medium', 'high', 'listing_id']], df_testpreds3])

    df_allpreds3.sort_values('listing_id', inplace=True)
    df_allpreds3.set_index('listing_id', inplace=True)
    
    # One frame per fold, same listing_id ordering as df_allpreds3.
    df_fold = []
    for f in range(run[2].shape[0]):
        df_fold.append(pd.DataFrame(run[2][f]))
        df_fold[-1]['listing_id'] = test_df.listing_id
        df_fold[-1].sort_values('listing_id', inplace=True)
        df_fold[-1].set_index('listing_id', inplace=True)

    return (df_allpreds3, df_fold)

In [11]:
def runXGB(train_X, train_y, test_X, test_y=None, feature_names=None, seed_val=0, num_rounds=4000):
    """Train a 3-class XGBoost model and predict on test_X.

    If test_y is given, trains with a watchlist and early stopping
    (50 rounds, eval every 10); otherwise trains for the full num_rounds.
    Returns (pred_test_y, model).
    """
    param = {}
    param['objective'] = 'multi:softprob'
    #param['tree_method'] = 'hist'
    param['eta'] = 0.02
    param['max_depth'] = 6
    param['silent'] = 1
    param['num_class'] = 3
    param['eval_metric'] = "mlogloss"
    param['min_child_weight'] = 1
    param['subsample'] = 0.7
    param['colsample_bytree'] = 0.7
    param['seed'] = seed_val
    #param['base_score'] = [np.mean(train_y == i) for i in [0, 1, 2]]

    plst = list(param.items())
    xgtrain = xgb.DMatrix(train_X, label=train_y)

    if test_y is not None:
        xgtest = xgb.DMatrix(test_X, label=test_y)
        watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
        model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=50, verbose_eval=10)
    else:
        xgtest = xgb.DMatrix(test_X)
        model = xgb.train(plst, xgtrain, num_rounds)

    # BUGFIX: best_ntree_limit is only set when early stopping runs; the
    # original read it unconditionally, which raises AttributeError on the
    # no-validation path.  Fallback 0 means "use all trees".
    pred_test_y = model.predict(xgtest, ntree_limit=getattr(model, 'best_ntree_limit', 0))
    return pred_test_y, model

In [12]:
def run_cv(train_df, cv_test, kf, features_to_use):
    """Run stratified CV with XGBoost, collecting out-of-fold predictions.

    cv_test must be a list with one test-set frame per fold (each carrying
    that fold's leakage-free group features).  Prints per-fold and overall
    log-loss.  Returns (models, df_cv, apreds): the fold models, the
    concatenated OOF prediction frame, and the (n_folds, n_test, 3) array
    of test predictions.
    """
    train_X = train_df[features_to_use]
    train_y = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

    cv_preds = []
    cv_scores = []
    models = []
    test_preds = []

    # BUGFIX: the original set `fold = 0` and never incremented it, so every
    # fold's test predictions were computed on cv_test[0]'s group features.
    for fold, (dev_index, val_index) in enumerate(kf.split(range(train_X.shape[0]), train_y)):

        dev_X, val_X = train_X.iloc[dev_index], train_X.iloc[val_index]
        dev_y, val_y = train_y[dev_index], train_y[val_index]
        preds, model = runXGB(dev_X, dev_y, val_X, val_y)
        models.append(model)

        cv_scores.append(log_loss(val_y, preds))
        print(cv_scores)

        # Out-of-fold predictions with their true labels, for stacking.
        cut_df = train_df.iloc[val_index]
        out_df = pd.DataFrame(preds)
        out_df.columns = ["low", "medium", "high"]
        out_df["listing_id"] = cut_df.listing_id.values
        interest = cut_df.interest_level.apply(lambda x: target_num_map[x])
        out_df['interest_tgt'] = interest.values

        cv_preds.append(out_df)

        # Predict the full test set with this fold's model and this fold's
        # version of the group features.
        xgtest = xgb.DMatrix(cv_test[fold][features_to_use])
        test_preds.append(model.predict(xgtest, ntree_limit=model.best_ntree_limit))

    df_cv = pd.concat(cv_preds)
    print(log_loss(df_cv.interest_tgt, df_cv[['low', 'medium', 'high']]))

    apreds = np.array(test_preds)

    return models, df_cv, apreds

In [13]:
# Fit the 5-fold CV models on the full feature list;
# rv3 = (models, OOF prediction frame, per-fold test predictions).
rv3 = run_cv(train_df, cv_test, kf, fl)


[0]	train-mlogloss:1.08432	test-mlogloss:1.08449
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.962911	test-mlogloss:0.965201
[20]	train-mlogloss:0.871901	test-mlogloss:0.876265
[30]	train-mlogloss:0.801269	test-mlogloss:0.808002
[40]	train-mlogloss:0.746058	test-mlogloss:0.755017
[50]	train-mlogloss:0.701655	test-mlogloss:0.712867
[60]	train-mlogloss:0.666381	test-mlogloss:0.679916
[70]	train-mlogloss:0.637632	test-mlogloss:0.653404
[80]	train-mlogloss:0.614002	test-mlogloss:0.63193
[90]	train-mlogloss:0.594618	test-mlogloss:0.614589
[100]	train-mlogloss:0.578171	test-mlogloss:0.600221
[110]	train-mlogloss:0.564296	test-mlogloss:0.588338
[120]	train-mlogloss:0.552398	test-mlogloss:0.578562
[130]	train-mlogloss:0.542329	test-mlogloss:0.570419
[140]	train-mlogloss:0.533656	test-mlogloss:0.563634
[150]	train-mlogloss:0.525917	test-mlogloss:0.557837
[160]	train-mlogloss:0.519254	test-mlogloss:0.553052
[170]	train-mlogloss:0.513068	test-mlogloss:0.548835
[180]	train-mlogloss:0.5074	test-mlogloss:0.54515
[190]	train-mlogloss:0.502382	test-mlogloss:0.541992
[200]	train-mlogloss:0.497765	test-mlogloss:0.539186
[210]	train-mlogloss:0.493544	test-mlogloss:0.536689
[220]	train-mlogloss:0.489681	test-mlogloss:0.534644
[230]	train-mlogloss:0.485965	test-mlogloss:0.532692
[240]	train-mlogloss:0.48223	test-mlogloss:0.530797
[250]	train-mlogloss:0.478801	test-mlogloss:0.529234
[260]	train-mlogloss:0.475695	test-mlogloss:0.52765
[270]	train-mlogloss:0.472806	test-mlogloss:0.526353
[280]	train-mlogloss:0.469957	test-mlogloss:0.525113
[290]	train-mlogloss:0.467192	test-mlogloss:0.523861
[300]	train-mlogloss:0.464531	test-mlogloss:0.522819
[310]	train-mlogloss:0.461778	test-mlogloss:0.521725
[320]	train-mlogloss:0.459104	test-mlogloss:0.520878
[330]	train-mlogloss:0.456656	test-mlogloss:0.52005
[340]	train-mlogloss:0.454204	test-mlogloss:0.51914
[350]	train-mlogloss:0.452022	test-mlogloss:0.518418
[360]	train-mlogloss:0.449853	test-mlogloss:0.517831
[370]	train-mlogloss:0.447809	test-mlogloss:0.517095
[380]	train-mlogloss:0.445656	test-mlogloss:0.516553
[390]	train-mlogloss:0.443569	test-mlogloss:0.515948
[400]	train-mlogloss:0.441326	test-mlogloss:0.515298
[410]	train-mlogloss:0.439254	test-mlogloss:0.514825
[420]	train-mlogloss:0.437488	test-mlogloss:0.514265
[430]	train-mlogloss:0.435727	test-mlogloss:0.513739
[440]	train-mlogloss:0.43385	test-mlogloss:0.513236
[450]	train-mlogloss:0.432221	test-mlogloss:0.512899
[460]	train-mlogloss:0.430429	test-mlogloss:0.512526
[470]	train-mlogloss:0.428606	test-mlogloss:0.512198
[480]	train-mlogloss:0.426832	test-mlogloss:0.511727
[490]	train-mlogloss:0.425156	test-mlogloss:0.511378
[500]	train-mlogloss:0.423472	test-mlogloss:0.511027
[510]	train-mlogloss:0.421743	test-mlogloss:0.510755
[520]	train-mlogloss:0.420175	test-mlogloss:0.51039
[530]	train-mlogloss:0.418246	test-mlogloss:0.510085
[540]	train-mlogloss:0.416516	test-mlogloss:0.509708
[550]	train-mlogloss:0.414979	test-mlogloss:0.509406
[560]	train-mlogloss:0.413243	test-mlogloss:0.5092
[570]	train-mlogloss:0.411786	test-mlogloss:0.508892
[580]	train-mlogloss:0.41014	test-mlogloss:0.508692
[590]	train-mlogloss:0.408549	test-mlogloss:0.508433
[600]	train-mlogloss:0.406845	test-mlogloss:0.508255
[610]	train-mlogloss:0.405321	test-mlogloss:0.508132
[620]	train-mlogloss:0.403746	test-mlogloss:0.507912
[630]	train-mlogloss:0.402207	test-mlogloss:0.507705
[640]	train-mlogloss:0.400684	test-mlogloss:0.50744
[650]	train-mlogloss:0.399047	test-mlogloss:0.507206
[660]	train-mlogloss:0.397445	test-mlogloss:0.507072
[670]	train-mlogloss:0.396096	test-mlogloss:0.506859
[680]	train-mlogloss:0.394609	test-mlogloss:0.506688
[690]	train-mlogloss:0.393189	test-mlogloss:0.506565
[700]	train-mlogloss:0.391682	test-mlogloss:0.506393
[710]	train-mlogloss:0.390214	test-mlogloss:0.506242
[720]	train-mlogloss:0.388487	test-mlogloss:0.50606
[730]	train-mlogloss:0.387119	test-mlogloss:0.505918
[740]	train-mlogloss:0.385535	test-mlogloss:0.505676
[750]	train-mlogloss:0.384108	test-mlogloss:0.505562
[760]	train-mlogloss:0.382565	test-mlogloss:0.505403
[770]	train-mlogloss:0.380991	test-mlogloss:0.505313
[780]	train-mlogloss:0.379531	test-mlogloss:0.505252
[790]	train-mlogloss:0.378101	test-mlogloss:0.505164
[800]	train-mlogloss:0.37659	test-mlogloss:0.505102
[810]	train-mlogloss:0.375326	test-mlogloss:0.504999
[820]	train-mlogloss:0.373948	test-mlogloss:0.504893
[830]	train-mlogloss:0.372438	test-mlogloss:0.504823
[840]	train-mlogloss:0.371038	test-mlogloss:0.504783
[850]	train-mlogloss:0.369633	test-mlogloss:0.504791
[860]	train-mlogloss:0.368449	test-mlogloss:0.504693
[870]	train-mlogloss:0.36682	test-mlogloss:0.504605
[880]	train-mlogloss:0.365495	test-mlogloss:0.504461
[890]	train-mlogloss:0.364141	test-mlogloss:0.504335
[900]	train-mlogloss:0.362703	test-mlogloss:0.504256
[910]	train-mlogloss:0.361289	test-mlogloss:0.504174
[920]	train-mlogloss:0.35985	test-mlogloss:0.504157
[930]	train-mlogloss:0.358496	test-mlogloss:0.504123
[940]	train-mlogloss:0.35712	test-mlogloss:0.504123
[950]	train-mlogloss:0.35585	test-mlogloss:0.503947
[960]	train-mlogloss:0.354482	test-mlogloss:0.50386
[970]	train-mlogloss:0.353212	test-mlogloss:0.503693
[980]	train-mlogloss:0.352057	test-mlogloss:0.503661
[990]	train-mlogloss:0.35076	test-mlogloss:0.50362
[1000]	train-mlogloss:0.349441	test-mlogloss:0.503661
[1010]	train-mlogloss:0.348221	test-mlogloss:0.50358
[1020]	train-mlogloss:0.346936	test-mlogloss:0.503423
[1030]	train-mlogloss:0.345785	test-mlogloss:0.503333
[1040]	train-mlogloss:0.344463	test-mlogloss:0.503298
[1050]	train-mlogloss:0.343177	test-mlogloss:0.503327
[1060]	train-mlogloss:0.34201	test-mlogloss:0.503289
[1070]	train-mlogloss:0.34052	test-mlogloss:0.503162
[1080]	train-mlogloss:0.339406	test-mlogloss:0.503061
[1090]	train-mlogloss:0.338066	test-mlogloss:0.503007
[1100]	train-mlogloss:0.336862	test-mlogloss:0.502914
[1110]	train-mlogloss:0.335566	test-mlogloss:0.502852
[1120]	train-mlogloss:0.334394	test-mlogloss:0.502863
[1130]	train-mlogloss:0.333257	test-mlogloss:0.502818
[1140]	train-mlogloss:0.332067	test-mlogloss:0.502785
[1150]	train-mlogloss:0.33091	test-mlogloss:0.502732
[1160]	train-mlogloss:0.329743	test-mlogloss:0.502731
[1170]	train-mlogloss:0.328404	test-mlogloss:0.502662
[1180]	train-mlogloss:0.327137	test-mlogloss:0.502578
[1190]	train-mlogloss:0.325885	test-mlogloss:0.502564
[1200]	train-mlogloss:0.324808	test-mlogloss:0.502514
[1210]	train-mlogloss:0.323617	test-mlogloss:0.502475
[1220]	train-mlogloss:0.322482	test-mlogloss:0.502458
[1230]	train-mlogloss:0.321218	test-mlogloss:0.502528
[1240]	train-mlogloss:0.32008	test-mlogloss:0.502448
[1250]	train-mlogloss:0.318936	test-mlogloss:0.502387
[1260]	train-mlogloss:0.317765	test-mlogloss:0.502336
[1270]	train-mlogloss:0.316588	test-mlogloss:0.502435
[1280]	train-mlogloss:0.315452	test-mlogloss:0.502382
[1290]	train-mlogloss:0.314374	test-mlogloss:0.502434
[1300]	train-mlogloss:0.313293	test-mlogloss:0.502423
Stopping. Best iteration:
[1259]	train-mlogloss:0.317915	test-mlogloss:0.502319

[0.50231927851655944]
[0]	train-mlogloss:1.08439	test-mlogloss:1.08457
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.963619	test-mlogloss:0.965419
[20]	train-mlogloss:0.872794	test-mlogloss:0.876263
[30]	train-mlogloss:0.802338	test-mlogloss:0.807591
[40]	train-mlogloss:0.747317	test-mlogloss:0.75442
[50]	train-mlogloss:0.702878	test-mlogloss:0.71163
[60]	train-mlogloss:0.667767	test-mlogloss:0.678086
[70]	train-mlogloss:0.639129	test-mlogloss:0.651207
[80]	train-mlogloss:0.615568	test-mlogloss:0.629255
[90]	train-mlogloss:0.59613	test-mlogloss:0.61159
[100]	train-mlogloss:0.579654	test-mlogloss:0.596904
[110]	train-mlogloss:0.565763	test-mlogloss:0.584588
[120]	train-mlogloss:0.553995	test-mlogloss:0.574522
[130]	train-mlogloss:0.543846	test-mlogloss:0.566202
[140]	train-mlogloss:0.535004	test-mlogloss:0.559139
[150]	train-mlogloss:0.527273	test-mlogloss:0.553229
[160]	train-mlogloss:0.520444	test-mlogloss:0.548019
[170]	train-mlogloss:0.514467	test-mlogloss:0.543632
[180]	train-mlogloss:0.508843	test-mlogloss:0.539643
[190]	train-mlogloss:0.503855	test-mlogloss:0.536366
[200]	train-mlogloss:0.499355	test-mlogloss:0.53358
[210]	train-mlogloss:0.495045	test-mlogloss:0.530975
[220]	train-mlogloss:0.491196	test-mlogloss:0.52881
[230]	train-mlogloss:0.487471	test-mlogloss:0.526805
[240]	train-mlogloss:0.483995	test-mlogloss:0.524957
[250]	train-mlogloss:0.480824	test-mlogloss:0.523396
[260]	train-mlogloss:0.477903	test-mlogloss:0.521986
[270]	train-mlogloss:0.474995	test-mlogloss:0.520691
[280]	train-mlogloss:0.472122	test-mlogloss:0.519391
[290]	train-mlogloss:0.469268	test-mlogloss:0.518269
[300]	train-mlogloss:0.466706	test-mlogloss:0.517304
[310]	train-mlogloss:0.464328	test-mlogloss:0.51648
[320]	train-mlogloss:0.461877	test-mlogloss:0.515571
[330]	train-mlogloss:0.459321	test-mlogloss:0.514687
[340]	train-mlogloss:0.457074	test-mlogloss:0.513887
[350]	train-mlogloss:0.454664	test-mlogloss:0.513115
[360]	train-mlogloss:0.452316	test-mlogloss:0.512411
[370]	train-mlogloss:0.450227	test-mlogloss:0.511837
[380]	train-mlogloss:0.448259	test-mlogloss:0.511244
[390]	train-mlogloss:0.446219	test-mlogloss:0.510718
[400]	train-mlogloss:0.44415	test-mlogloss:0.510173
[410]	train-mlogloss:0.442232	test-mlogloss:0.509663
[420]	train-mlogloss:0.440249	test-mlogloss:0.509195
[430]	train-mlogloss:0.438356	test-mlogloss:0.508704
[440]	train-mlogloss:0.436526	test-mlogloss:0.508377
[450]	train-mlogloss:0.434651	test-mlogloss:0.507914
[460]	train-mlogloss:0.43274	test-mlogloss:0.507485
[470]	train-mlogloss:0.430954	test-mlogloss:0.507131
[480]	train-mlogloss:0.429222	test-mlogloss:0.506762
[490]	train-mlogloss:0.427387	test-mlogloss:0.506406
[500]	train-mlogloss:0.42556	test-mlogloss:0.506058
[510]	train-mlogloss:0.423956	test-mlogloss:0.505748
[520]	train-mlogloss:0.422301	test-mlogloss:0.505498
[530]	train-mlogloss:0.420497	test-mlogloss:0.505148
[540]	train-mlogloss:0.419006	test-mlogloss:0.504895
[550]	train-mlogloss:0.417292	test-mlogloss:0.504625
[560]	train-mlogloss:0.415726	test-mlogloss:0.504336
[570]	train-mlogloss:0.414203	test-mlogloss:0.504106
[580]	train-mlogloss:0.41246	test-mlogloss:0.503762
[590]	train-mlogloss:0.410893	test-mlogloss:0.503582
[600]	train-mlogloss:0.409182	test-mlogloss:0.503361
[610]	train-mlogloss:0.40772	test-mlogloss:0.503231
[620]	train-mlogloss:0.406125	test-mlogloss:0.503052
[630]	train-mlogloss:0.404573	test-mlogloss:0.502829
[640]	train-mlogloss:0.403036	test-mlogloss:0.502635
[650]	train-mlogloss:0.401523	test-mlogloss:0.50245
[660]	train-mlogloss:0.400031	test-mlogloss:0.502231
[670]	train-mlogloss:0.398478	test-mlogloss:0.502024
[680]	train-mlogloss:0.396996	test-mlogloss:0.501896
[690]	train-mlogloss:0.39552	test-mlogloss:0.501757
[700]	train-mlogloss:0.394062	test-mlogloss:0.501609
[710]	train-mlogloss:0.392646	test-mlogloss:0.501482
[720]	train-mlogloss:0.391114	test-mlogloss:0.501371
[730]	train-mlogloss:0.389477	test-mlogloss:0.50125
[740]	train-mlogloss:0.387982	test-mlogloss:0.501136
[750]	train-mlogloss:0.386486	test-mlogloss:0.500993
[760]	train-mlogloss:0.385004	test-mlogloss:0.500863
[770]	train-mlogloss:0.383541	test-mlogloss:0.500759
[780]	train-mlogloss:0.382163	test-mlogloss:0.500619
[790]	train-mlogloss:0.380758	test-mlogloss:0.500487
[800]	train-mlogloss:0.379524	test-mlogloss:0.500451
[810]	train-mlogloss:0.378151	test-mlogloss:0.500372
[820]	train-mlogloss:0.376807	test-mlogloss:0.500223
[830]	train-mlogloss:0.375559	test-mlogloss:0.500143
[840]	train-mlogloss:0.373986	test-mlogloss:0.500082
[850]	train-mlogloss:0.372618	test-mlogloss:0.500018
[860]	train-mlogloss:0.371348	test-mlogloss:0.499958
[870]	train-mlogloss:0.369911	test-mlogloss:0.499881
[880]	train-mlogloss:0.368672	test-mlogloss:0.499813
[890]	train-mlogloss:0.367341	test-mlogloss:0.499765
[900]	train-mlogloss:0.366001	test-mlogloss:0.49965
[910]	train-mlogloss:0.364541	test-mlogloss:0.499531
[920]	train-mlogloss:0.363174	test-mlogloss:0.499505
[930]	train-mlogloss:0.361759	test-mlogloss:0.499427
[940]	train-mlogloss:0.360428	test-mlogloss:0.499464
[950]	train-mlogloss:0.359149	test-mlogloss:0.499385
[960]	train-mlogloss:0.357968	test-mlogloss:0.499391
[970]	train-mlogloss:0.356623	test-mlogloss:0.499255
[980]	train-mlogloss:0.355306	test-mlogloss:0.499207
[990]	train-mlogloss:0.353882	test-mlogloss:0.499222
[1000]	train-mlogloss:0.352617	test-mlogloss:0.499154
[1010]	train-mlogloss:0.351409	test-mlogloss:0.499156
[1020]	train-mlogloss:0.350147	test-mlogloss:0.499045
[1030]	train-mlogloss:0.34897	test-mlogloss:0.498973
[1040]	train-mlogloss:0.347758	test-mlogloss:0.498882
[1050]	train-mlogloss:0.346547	test-mlogloss:0.498855
[1060]	train-mlogloss:0.345534	test-mlogloss:0.498839
[1070]	train-mlogloss:0.344211	test-mlogloss:0.498829
[1080]	train-mlogloss:0.342947	test-mlogloss:0.498875
[1090]	train-mlogloss:0.341608	test-mlogloss:0.498814
[1100]	train-mlogloss:0.340413	test-mlogloss:0.498727
[1110]	train-mlogloss:0.339085	test-mlogloss:0.498691
[1120]	train-mlogloss:0.337854	test-mlogloss:0.498693
[1130]	train-mlogloss:0.336573	test-mlogloss:0.498627
[1140]	train-mlogloss:0.335439	test-mlogloss:0.498638
[1150]	train-mlogloss:0.334341	test-mlogloss:0.498565
[1160]	train-mlogloss:0.333201	test-mlogloss:0.498523
[1170]	train-mlogloss:0.331987	test-mlogloss:0.498471
[1180]	train-mlogloss:0.330784	test-mlogloss:0.498404
[1190]	train-mlogloss:0.329713	test-mlogloss:0.498332
[1200]	train-mlogloss:0.328568	test-mlogloss:0.498337
[1210]	train-mlogloss:0.327377	test-mlogloss:0.498299
[1220]	train-mlogloss:0.326227	test-mlogloss:0.498203
[1230]	train-mlogloss:0.325056	test-mlogloss:0.49817
[1240]	train-mlogloss:0.323964	test-mlogloss:0.498127
[1250]	train-mlogloss:0.322897	test-mlogloss:0.498058
[1260]	train-mlogloss:0.321775	test-mlogloss:0.498072
[1270]	train-mlogloss:0.320643	test-mlogloss:0.498034
[1280]	train-mlogloss:0.319485	test-mlogloss:0.498027
[1290]	train-mlogloss:0.318415	test-mlogloss:0.498002
[1300]	train-mlogloss:0.317275	test-mlogloss:0.49798
[1310]	train-mlogloss:0.31609	test-mlogloss:0.497851
[1320]	train-mlogloss:0.314926	test-mlogloss:0.497807
[1330]	train-mlogloss:0.313965	test-mlogloss:0.497786
[1340]	train-mlogloss:0.31287	test-mlogloss:0.497706
[1350]	train-mlogloss:0.311729	test-mlogloss:0.497748
[1360]	train-mlogloss:0.310562	test-mlogloss:0.497786
[1370]	train-mlogloss:0.309455	test-mlogloss:0.497731
[1380]	train-mlogloss:0.308319	test-mlogloss:0.497693
[1390]	train-mlogloss:0.30722	test-mlogloss:0.497694
[1400]	train-mlogloss:0.306219	test-mlogloss:0.497741
[1410]	train-mlogloss:0.305098	test-mlogloss:0.497743
[1420]	train-mlogloss:0.303938	test-mlogloss:0.497693
Stopping. Best iteration:
[1377]	train-mlogloss:0.308642	test-mlogloss:0.49767

[0.50231927851655944, 0.49766997270751701]
[0]	train-mlogloss:1.08431	test-mlogloss:1.08476
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.962679	test-mlogloss:0.967227
[20]	train-mlogloss:0.871248	test-mlogloss:0.87946
[30]	train-mlogloss:0.800171	test-mlogloss:0.811984
[40]	train-mlogloss:0.744861	test-mlogloss:0.759692
[50]	train-mlogloss:0.700276	test-mlogloss:0.718042
[60]	train-mlogloss:0.664751	test-mlogloss:0.685303
[70]	train-mlogloss:0.635967	test-mlogloss:0.659208
[80]	train-mlogloss:0.612232	test-mlogloss:0.637944
[90]	train-mlogloss:0.592612	test-mlogloss:0.620556
[100]	train-mlogloss:0.576045	test-mlogloss:0.60633
[110]	train-mlogloss:0.562108	test-mlogloss:0.594724
[120]	train-mlogloss:0.550187	test-mlogloss:0.584971
[130]	train-mlogloss:0.540006	test-mlogloss:0.576895
[140]	train-mlogloss:0.53126	test-mlogloss:0.570185
[150]	train-mlogloss:0.523529	test-mlogloss:0.564433
[160]	train-mlogloss:0.516782	test-mlogloss:0.559692
[170]	train-mlogloss:0.51066	test-mlogloss:0.555525
[180]	train-mlogloss:0.505269	test-mlogloss:0.55196
[190]	train-mlogloss:0.500184	test-mlogloss:0.548891
[200]	train-mlogloss:0.495649	test-mlogloss:0.546101
[210]	train-mlogloss:0.491543	test-mlogloss:0.543685
[220]	train-mlogloss:0.487629	test-mlogloss:0.541495
[230]	train-mlogloss:0.483929	test-mlogloss:0.539578
[240]	train-mlogloss:0.480565	test-mlogloss:0.537846
[250]	train-mlogloss:0.477315	test-mlogloss:0.536392
[260]	train-mlogloss:0.474079	test-mlogloss:0.534995
[270]	train-mlogloss:0.4713	test-mlogloss:0.53378
[280]	train-mlogloss:0.468514	test-mlogloss:0.532712
[290]	train-mlogloss:0.465679	test-mlogloss:0.5316
[300]	train-mlogloss:0.463125	test-mlogloss:0.530688
[310]	train-mlogloss:0.460677	test-mlogloss:0.529862
[320]	train-mlogloss:0.458124	test-mlogloss:0.528929
[330]	train-mlogloss:0.455666	test-mlogloss:0.528093
[340]	train-mlogloss:0.453238	test-mlogloss:0.527408
[350]	train-mlogloss:0.451076	test-mlogloss:0.526774
[360]	train-mlogloss:0.448773	test-mlogloss:0.526122
[370]	train-mlogloss:0.446306	test-mlogloss:0.525499
[380]	train-mlogloss:0.444211	test-mlogloss:0.524858
[390]	train-mlogloss:0.442146	test-mlogloss:0.524324
[400]	train-mlogloss:0.440019	test-mlogloss:0.523839
[410]	train-mlogloss:0.437939	test-mlogloss:0.523295
[420]	train-mlogloss:0.43591	test-mlogloss:0.522826
[430]	train-mlogloss:0.434083	test-mlogloss:0.522367
[440]	train-mlogloss:0.432162	test-mlogloss:0.521984
[450]	train-mlogloss:0.430234	test-mlogloss:0.521555
[460]	train-mlogloss:0.428296	test-mlogloss:0.521172
[470]	train-mlogloss:0.426646	test-mlogloss:0.520874
[480]	train-mlogloss:0.424938	test-mlogloss:0.52059
[490]	train-mlogloss:0.423176	test-mlogloss:0.520193
[500]	train-mlogloss:0.421415	test-mlogloss:0.519923
[510]	train-mlogloss:0.41956	test-mlogloss:0.519607
[520]	train-mlogloss:0.417841	test-mlogloss:0.519278
[530]	train-mlogloss:0.416335	test-mlogloss:0.51906
[540]	train-mlogloss:0.414498	test-mlogloss:0.518725
[550]	train-mlogloss:0.412828	test-mlogloss:0.518444
[560]	train-mlogloss:0.411052	test-mlogloss:0.518177
[570]	train-mlogloss:0.409329	test-mlogloss:0.517917
[580]	train-mlogloss:0.40792	test-mlogloss:0.5176
[590]	train-mlogloss:0.40641	test-mlogloss:0.517341
[600]	train-mlogloss:0.404802	test-mlogloss:0.517247
[610]	train-mlogloss:0.403251	test-mlogloss:0.516944
[620]	train-mlogloss:0.401522	test-mlogloss:0.516704
[630]	train-mlogloss:0.399769	test-mlogloss:0.516512
[640]	train-mlogloss:0.398225	test-mlogloss:0.516293
[650]	train-mlogloss:0.396819	test-mlogloss:0.516079
[660]	train-mlogloss:0.395257	test-mlogloss:0.515928
[670]	train-mlogloss:0.393573	test-mlogloss:0.515687
[680]	train-mlogloss:0.392049	test-mlogloss:0.515454
[690]	train-mlogloss:0.39064	test-mlogloss:0.515271
[700]	train-mlogloss:0.389209	test-mlogloss:0.51512
[710]	train-mlogloss:0.387758	test-mlogloss:0.514951
[720]	train-mlogloss:0.386391	test-mlogloss:0.514868
[730]	train-mlogloss:0.384904	test-mlogloss:0.514735
[740]	train-mlogloss:0.383548	test-mlogloss:0.514565
[750]	train-mlogloss:0.382238	test-mlogloss:0.514546
[760]	train-mlogloss:0.380795	test-mlogloss:0.514414
[770]	train-mlogloss:0.379292	test-mlogloss:0.514202
[780]	train-mlogloss:0.377871	test-mlogloss:0.514171
[790]	train-mlogloss:0.376491	test-mlogloss:0.514084
[800]	train-mlogloss:0.375078	test-mlogloss:0.513971
[810]	train-mlogloss:0.373729	test-mlogloss:0.513871
[820]	train-mlogloss:0.372445	test-mlogloss:0.513703
[830]	train-mlogloss:0.3711	test-mlogloss:0.513552
[840]	train-mlogloss:0.369685	test-mlogloss:0.5134
[850]	train-mlogloss:0.368388	test-mlogloss:0.513258
[860]	train-mlogloss:0.366973	test-mlogloss:0.513127
[870]	train-mlogloss:0.365668	test-mlogloss:0.513066
[880]	train-mlogloss:0.364351	test-mlogloss:0.512976
[890]	train-mlogloss:0.362902	test-mlogloss:0.512801
[900]	train-mlogloss:0.361531	test-mlogloss:0.512694
[910]	train-mlogloss:0.36025	test-mlogloss:0.512546
[920]	train-mlogloss:0.358874	test-mlogloss:0.512512
[930]	train-mlogloss:0.357674	test-mlogloss:0.512463
[940]	train-mlogloss:0.356478	test-mlogloss:0.512463
[950]	train-mlogloss:0.355048	test-mlogloss:0.512386
[960]	train-mlogloss:0.353705	test-mlogloss:0.512266
[970]	train-mlogloss:0.352349	test-mlogloss:0.512253
[980]	train-mlogloss:0.350901	test-mlogloss:0.512164
[990]	train-mlogloss:0.349733	test-mlogloss:0.512075
[1000]	train-mlogloss:0.348403	test-mlogloss:0.512024
[1010]	train-mlogloss:0.346921	test-mlogloss:0.511923
[1020]	train-mlogloss:0.345635	test-mlogloss:0.511836
[1030]	train-mlogloss:0.344337	test-mlogloss:0.511741
[1040]	train-mlogloss:0.343103	test-mlogloss:0.511689
[1050]	train-mlogloss:0.341809	test-mlogloss:0.511631
[1060]	train-mlogloss:0.340664	test-mlogloss:0.511553
[1070]	train-mlogloss:0.339449	test-mlogloss:0.511442
[1080]	train-mlogloss:0.338213	test-mlogloss:0.511285
[1090]	train-mlogloss:0.336866	test-mlogloss:0.511274
[1100]	train-mlogloss:0.335756	test-mlogloss:0.511262
[1110]	train-mlogloss:0.33448	test-mlogloss:0.511151
[1120]	train-mlogloss:0.333339	test-mlogloss:0.510987
[1130]	train-mlogloss:0.3321	test-mlogloss:0.510897
[1140]	train-mlogloss:0.330988	test-mlogloss:0.51085
[1150]	train-mlogloss:0.3298	test-mlogloss:0.510789
[1160]	train-mlogloss:0.328664	test-mlogloss:0.510754
[1170]	train-mlogloss:0.327435	test-mlogloss:0.510674
[1180]	train-mlogloss:0.326348	test-mlogloss:0.510697
[1190]	train-mlogloss:0.325299	test-mlogloss:0.510653
[1200]	train-mlogloss:0.324052	test-mlogloss:0.510549
[1210]	train-mlogloss:0.322822	test-mlogloss:0.510566
[1220]	train-mlogloss:0.32163	test-mlogloss:0.510471
[1230]	train-mlogloss:0.32057	test-mlogloss:0.510434
[1240]	train-mlogloss:0.319367	test-mlogloss:0.510428
[1250]	train-mlogloss:0.318204	test-mlogloss:0.510389
[1260]	train-mlogloss:0.316989	test-mlogloss:0.510348
[1270]	train-mlogloss:0.315772	test-mlogloss:0.510302
[1280]	train-mlogloss:0.314591	test-mlogloss:0.510268
[1290]	train-mlogloss:0.313569	test-mlogloss:0.51025
[1300]	train-mlogloss:0.312384	test-mlogloss:0.510271
[1310]	train-mlogloss:0.311255	test-mlogloss:0.51023
[1320]	train-mlogloss:0.31003	test-mlogloss:0.510262
[1330]	train-mlogloss:0.308905	test-mlogloss:0.510158
[1340]	train-mlogloss:0.307737	test-mlogloss:0.510106
[1350]	train-mlogloss:0.306715	test-mlogloss:0.510003
[1360]	train-mlogloss:0.305657	test-mlogloss:0.510008
[1370]	train-mlogloss:0.304645	test-mlogloss:0.510041
[1380]	train-mlogloss:0.303579	test-mlogloss:0.510034
[1390]	train-mlogloss:0.302515	test-mlogloss:0.510048
[1400]	train-mlogloss:0.301491	test-mlogloss:0.510041
Stopping. Best iteration:
[1353]	train-mlogloss:0.306367	test-mlogloss:0.509964

[0.50231927851655944, 0.49766997270751701, 0.5099642237087797]
[0]	train-mlogloss:1.08433	test-mlogloss:1.08476
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.962748	test-mlogloss:0.96671
[20]	train-mlogloss:0.871461	test-mlogloss:0.878693
[30]	train-mlogloss:0.800649	test-mlogloss:0.810915
[40]	train-mlogloss:0.745504	test-mlogloss:0.75844
[50]	train-mlogloss:0.701042	test-mlogloss:0.716552
[60]	train-mlogloss:0.66554	test-mlogloss:0.683544
[70]	train-mlogloss:0.636703	test-mlogloss:0.657203
[80]	train-mlogloss:0.612977	test-mlogloss:0.63568
[90]	train-mlogloss:0.593337	test-mlogloss:0.618297
[100]	train-mlogloss:0.576957	test-mlogloss:0.603997
[110]	train-mlogloss:0.563018	test-mlogloss:0.592214
[120]	train-mlogloss:0.551162	test-mlogloss:0.582563
[130]	train-mlogloss:0.541079	test-mlogloss:0.57441
[140]	train-mlogloss:0.532219	test-mlogloss:0.567428
[150]	train-mlogloss:0.524458	test-mlogloss:0.561566
[160]	train-mlogloss:0.517547	test-mlogloss:0.55645
[170]	train-mlogloss:0.51146	test-mlogloss:0.552194
[180]	train-mlogloss:0.505845	test-mlogloss:0.548246
[190]	train-mlogloss:0.500862	test-mlogloss:0.544935
[200]	train-mlogloss:0.496319	test-mlogloss:0.542131
[210]	train-mlogloss:0.492097	test-mlogloss:0.539601
[220]	train-mlogloss:0.488114	test-mlogloss:0.537266
[230]	train-mlogloss:0.484477	test-mlogloss:0.535261
[240]	train-mlogloss:0.480953	test-mlogloss:0.533349
[250]	train-mlogloss:0.47746	test-mlogloss:0.531733
[260]	train-mlogloss:0.474383	test-mlogloss:0.530274
[270]	train-mlogloss:0.471326	test-mlogloss:0.5289
[280]	train-mlogloss:0.46838	test-mlogloss:0.527656
[290]	train-mlogloss:0.465498	test-mlogloss:0.526366
[300]	train-mlogloss:0.46298	test-mlogloss:0.525298
[310]	train-mlogloss:0.460268	test-mlogloss:0.524332
[320]	train-mlogloss:0.457838	test-mlogloss:0.523477
[330]	train-mlogloss:0.455382	test-mlogloss:0.522607
[340]	train-mlogloss:0.453	test-mlogloss:0.521816
[350]	train-mlogloss:0.450671	test-mlogloss:0.521079
[360]	train-mlogloss:0.448362	test-mlogloss:0.52036
[370]	train-mlogloss:0.4463	test-mlogloss:0.519733
[380]	train-mlogloss:0.444035	test-mlogloss:0.519131
[390]	train-mlogloss:0.441939	test-mlogloss:0.518571
[400]	train-mlogloss:0.439903	test-mlogloss:0.51795
[410]	train-mlogloss:0.437905	test-mlogloss:0.517414
[420]	train-mlogloss:0.436219	test-mlogloss:0.516978
[430]	train-mlogloss:0.434052	test-mlogloss:0.516486
[440]	train-mlogloss:0.432409	test-mlogloss:0.516111
[450]	train-mlogloss:0.430518	test-mlogloss:0.515748
[460]	train-mlogloss:0.428714	test-mlogloss:0.515346
[470]	train-mlogloss:0.42698	test-mlogloss:0.514948
[480]	train-mlogloss:0.425183	test-mlogloss:0.514619
[490]	train-mlogloss:0.423458	test-mlogloss:0.514345
[500]	train-mlogloss:0.421784	test-mlogloss:0.514103
[510]	train-mlogloss:0.420056	test-mlogloss:0.513821
[520]	train-mlogloss:0.41844	test-mlogloss:0.513531
[530]	train-mlogloss:0.416718	test-mlogloss:0.513186
[540]	train-mlogloss:0.415009	test-mlogloss:0.512999
[550]	train-mlogloss:0.413359	test-mlogloss:0.512798
[560]	train-mlogloss:0.411898	test-mlogloss:0.512549
[570]	train-mlogloss:0.410377	test-mlogloss:0.512329
[580]	train-mlogloss:0.40884	test-mlogloss:0.512164
[590]	train-mlogloss:0.407151	test-mlogloss:0.51187
[600]	train-mlogloss:0.405435	test-mlogloss:0.511684
[610]	train-mlogloss:0.403744	test-mlogloss:0.511495
[620]	train-mlogloss:0.402174	test-mlogloss:0.51124
[630]	train-mlogloss:0.400461	test-mlogloss:0.510938
[640]	train-mlogloss:0.39912	test-mlogloss:0.510784
[650]	train-mlogloss:0.39752	test-mlogloss:0.510675
[660]	train-mlogloss:0.395932	test-mlogloss:0.510508
[670]	train-mlogloss:0.394301	test-mlogloss:0.510314
[680]	train-mlogloss:0.392689	test-mlogloss:0.51009
[690]	train-mlogloss:0.391332	test-mlogloss:0.509955
[700]	train-mlogloss:0.389811	test-mlogloss:0.509815
[710]	train-mlogloss:0.3884	test-mlogloss:0.509633
[720]	train-mlogloss:0.386935	test-mlogloss:0.509419
[730]	train-mlogloss:0.385467	test-mlogloss:0.509249
[740]	train-mlogloss:0.384023	test-mlogloss:0.509163
[750]	train-mlogloss:0.382436	test-mlogloss:0.509072
[760]	train-mlogloss:0.381079	test-mlogloss:0.509066
[770]	train-mlogloss:0.379512	test-mlogloss:0.508912
[780]	train-mlogloss:0.378103	test-mlogloss:0.508882
[790]	train-mlogloss:0.376825	test-mlogloss:0.508838
[800]	train-mlogloss:0.375472	test-mlogloss:0.508686
[810]	train-mlogloss:0.374047	test-mlogloss:0.508622
[820]	train-mlogloss:0.372582	test-mlogloss:0.508598
[830]	train-mlogloss:0.371177	test-mlogloss:0.508523
[840]	train-mlogloss:0.369861	test-mlogloss:0.508415
[850]	train-mlogloss:0.368525	test-mlogloss:0.508296
[860]	train-mlogloss:0.367123	test-mlogloss:0.508224
[870]	train-mlogloss:0.365687	test-mlogloss:0.508083
[880]	train-mlogloss:0.364201	test-mlogloss:0.508083
[890]	train-mlogloss:0.362956	test-mlogloss:0.507978
[900]	train-mlogloss:0.361549	test-mlogloss:0.507922
[910]	train-mlogloss:0.360211	test-mlogloss:0.507883
[920]	train-mlogloss:0.358853	test-mlogloss:0.507873
[930]	train-mlogloss:0.357512	test-mlogloss:0.507843
[940]	train-mlogloss:0.356231	test-mlogloss:0.507711
[950]	train-mlogloss:0.354701	test-mlogloss:0.507662
[960]	train-mlogloss:0.353535	test-mlogloss:0.507627
[970]	train-mlogloss:0.352137	test-mlogloss:0.507585
[980]	train-mlogloss:0.350884	test-mlogloss:0.507515
[990]	train-mlogloss:0.349557	test-mlogloss:0.507432
[1000]	train-mlogloss:0.348172	test-mlogloss:0.507319
[1010]	train-mlogloss:0.347053	test-mlogloss:0.50728
[1020]	train-mlogloss:0.345889	test-mlogloss:0.507252
[1030]	train-mlogloss:0.344605	test-mlogloss:0.507209
[1040]	train-mlogloss:0.343288	test-mlogloss:0.507135
[1050]	train-mlogloss:0.342108	test-mlogloss:0.507105
[1060]	train-mlogloss:0.340714	test-mlogloss:0.507049
[1070]	train-mlogloss:0.339415	test-mlogloss:0.506957
[1080]	train-mlogloss:0.338229	test-mlogloss:0.506884
[1090]	train-mlogloss:0.336974	test-mlogloss:0.506867
[1100]	train-mlogloss:0.335772	test-mlogloss:0.506836
[1110]	train-mlogloss:0.334584	test-mlogloss:0.506809
[1120]	train-mlogloss:0.333479	test-mlogloss:0.506865
[1130]	train-mlogloss:0.33225	test-mlogloss:0.506887
[1140]	train-mlogloss:0.331156	test-mlogloss:0.506822
[1150]	train-mlogloss:0.32992	test-mlogloss:0.506728
[1160]	train-mlogloss:0.328791	test-mlogloss:0.506662
[1170]	train-mlogloss:0.327573	test-mlogloss:0.506783
[1180]	train-mlogloss:0.326393	test-mlogloss:0.506756
[1190]	train-mlogloss:0.325177	test-mlogloss:0.506689
[1200]	train-mlogloss:0.324096	test-mlogloss:0.506631
[1210]	train-mlogloss:0.323058	test-mlogloss:0.506584
[1220]	train-mlogloss:0.321941	test-mlogloss:0.50659
[1230]	train-mlogloss:0.320755	test-mlogloss:0.50655
[1240]	train-mlogloss:0.319688	test-mlogloss:0.506486
[1250]	train-mlogloss:0.318475	test-mlogloss:0.506502
[1260]	train-mlogloss:0.317272	test-mlogloss:0.50649
[1270]	train-mlogloss:0.316161	test-mlogloss:0.506547
[1280]	train-mlogloss:0.314983	test-mlogloss:0.506554
[1290]	train-mlogloss:0.313868	test-mlogloss:0.506527
Stopping. Best iteration:
[1240]	train-mlogloss:0.319688	test-mlogloss:0.506486

[0.50231927851655944, 0.49766997270751701, 0.5099642237087797, 0.50648552464250396]
[0]	train-mlogloss:1.08439	test-mlogloss:1.08469
Multiple eval metrics have been passed: 'test-mlogloss' will be used for early stopping.

Will train until test-mlogloss hasn't improved in 50 rounds.
[10]	train-mlogloss:0.961763	test-mlogloss:0.964962
[20]	train-mlogloss:0.86983	test-mlogloss:0.875708
[30]	train-mlogloss:0.799133	test-mlogloss:0.807596
[40]	train-mlogloss:0.743816	test-mlogloss:0.754619
[50]	train-mlogloss:0.700009	test-mlogloss:0.713118
[60]	train-mlogloss:0.66465	test-mlogloss:0.679967
[70]	train-mlogloss:0.635934	test-mlogloss:0.653364
[80]	train-mlogloss:0.612554	test-mlogloss:0.63195
[90]	train-mlogloss:0.59329	test-mlogloss:0.614689
[100]	train-mlogloss:0.577075	test-mlogloss:0.600401
[110]	train-mlogloss:0.563476	test-mlogloss:0.588731
[120]	train-mlogloss:0.551952	test-mlogloss:0.579258
[130]	train-mlogloss:0.541904	test-mlogloss:0.571102
[140]	train-mlogloss:0.533338	test-mlogloss:0.564232
[150]	train-mlogloss:0.525642	test-mlogloss:0.558483
[160]	train-mlogloss:0.518692	test-mlogloss:0.553535
[170]	train-mlogloss:0.512618	test-mlogloss:0.549323
[180]	train-mlogloss:0.507059	test-mlogloss:0.545593
[190]	train-mlogloss:0.502205	test-mlogloss:0.542321
[200]	train-mlogloss:0.497499	test-mlogloss:0.53946
[210]	train-mlogloss:0.492969	test-mlogloss:0.536829
[220]	train-mlogloss:0.488905	test-mlogloss:0.534577
[230]	train-mlogloss:0.485234	test-mlogloss:0.532503
[240]	train-mlogloss:0.481695	test-mlogloss:0.530787
[250]	train-mlogloss:0.478267	test-mlogloss:0.529051
[260]	train-mlogloss:0.47514	test-mlogloss:0.527598
[270]	train-mlogloss:0.472021	test-mlogloss:0.526266
[280]	train-mlogloss:0.469212	test-mlogloss:0.52505
[290]	train-mlogloss:0.466354	test-mlogloss:0.523834
[300]	train-mlogloss:0.463726	test-mlogloss:0.522698
[310]	train-mlogloss:0.461037	test-mlogloss:0.521664
[320]	train-mlogloss:0.458714	test-mlogloss:0.520766
[330]	train-mlogloss:0.456135	test-mlogloss:0.519919
[340]	train-mlogloss:0.453767	test-mlogloss:0.519173
[350]	train-mlogloss:0.451674	test-mlogloss:0.518406
[360]	train-mlogloss:0.44936	test-mlogloss:0.517727
[370]	train-mlogloss:0.447179	test-mlogloss:0.517009
[380]	train-mlogloss:0.445077	test-mlogloss:0.516464
[390]	train-mlogloss:0.443175	test-mlogloss:0.515944
[400]	train-mlogloss:0.441008	test-mlogloss:0.515414
[410]	train-mlogloss:0.43905	test-mlogloss:0.514857
[420]	train-mlogloss:0.437215	test-mlogloss:0.51439
[430]	train-mlogloss:0.435336	test-mlogloss:0.513944
[440]	train-mlogloss:0.433378	test-mlogloss:0.513517
[450]	train-mlogloss:0.431606	test-mlogloss:0.513123
[460]	train-mlogloss:0.429753	test-mlogloss:0.512726
[470]	train-mlogloss:0.427968	test-mlogloss:0.512298
[480]	train-mlogloss:0.426168	test-mlogloss:0.511962
[490]	train-mlogloss:0.424461	test-mlogloss:0.51162
[500]	train-mlogloss:0.422726	test-mlogloss:0.511296
[510]	train-mlogloss:0.42083	test-mlogloss:0.51097
[520]	train-mlogloss:0.419019	test-mlogloss:0.510717
[530]	train-mlogloss:0.417332	test-mlogloss:0.510482
[540]	train-mlogloss:0.415671	test-mlogloss:0.51027
[550]	train-mlogloss:0.413972	test-mlogloss:0.510009
[560]	train-mlogloss:0.412114	test-mlogloss:0.509819
[570]	train-mlogloss:0.410561	test-mlogloss:0.509561
[580]	train-mlogloss:0.409032	test-mlogloss:0.509359
[590]	train-mlogloss:0.407571	test-mlogloss:0.509171
[600]	train-mlogloss:0.405984	test-mlogloss:0.508981
[610]	train-mlogloss:0.404389	test-mlogloss:0.508767
[620]	train-mlogloss:0.402918	test-mlogloss:0.50856
[630]	train-mlogloss:0.401216	test-mlogloss:0.508346
[640]	train-mlogloss:0.399696	test-mlogloss:0.508192
[650]	train-mlogloss:0.398104	test-mlogloss:0.508066
[660]	train-mlogloss:0.396624	test-mlogloss:0.507965
[670]	train-mlogloss:0.395009	test-mlogloss:0.507796
[680]	train-mlogloss:0.393421	test-mlogloss:0.507642
[690]	train-mlogloss:0.391855	test-mlogloss:0.507472
[700]	train-mlogloss:0.390385	test-mlogloss:0.507345
[710]	train-mlogloss:0.388909	test-mlogloss:0.507208
[720]	train-mlogloss:0.387488	test-mlogloss:0.507042
[730]	train-mlogloss:0.385955	test-mlogloss:0.506958
[740]	train-mlogloss:0.384654	test-mlogloss:0.506837
[750]	train-mlogloss:0.383121	test-mlogloss:0.506729
[760]	train-mlogloss:0.38165	test-mlogloss:0.506681
[770]	train-mlogloss:0.380222	test-mlogloss:0.506562
[780]	train-mlogloss:0.378887	test-mlogloss:0.50639
[790]	train-mlogloss:0.377508	test-mlogloss:0.50627
[800]	train-mlogloss:0.376167	test-mlogloss:0.506164
[810]	train-mlogloss:0.374784	test-mlogloss:0.506068
[820]	train-mlogloss:0.37328	test-mlogloss:0.505936
[830]	train-mlogloss:0.371932	test-mlogloss:0.505861
[840]	train-mlogloss:0.370401	test-mlogloss:0.505729
[850]	train-mlogloss:0.369008	test-mlogloss:0.50561
[860]	train-mlogloss:0.367692	test-mlogloss:0.505551
[870]	train-mlogloss:0.366245	test-mlogloss:0.505442
[880]	train-mlogloss:0.36469	test-mlogloss:0.50531
[890]	train-mlogloss:0.363321	test-mlogloss:0.505178
[900]	train-mlogloss:0.361997	test-mlogloss:0.505048
[910]	train-mlogloss:0.360528	test-mlogloss:0.504943
[920]	train-mlogloss:0.359156	test-mlogloss:0.504859
[930]	train-mlogloss:0.357739	test-mlogloss:0.504788
[940]	train-mlogloss:0.356497	test-mlogloss:0.504796
[950]	train-mlogloss:0.355223	test-mlogloss:0.504774
[960]	train-mlogloss:0.353913	test-mlogloss:0.504751
[970]	train-mlogloss:0.352541	test-mlogloss:0.504705
[980]	train-mlogloss:0.351241	test-mlogloss:0.504627
[990]	train-mlogloss:0.350095	test-mlogloss:0.504546
[1000]	train-mlogloss:0.348938	test-mlogloss:0.504416
[1010]	train-mlogloss:0.34771	test-mlogloss:0.504301
[1020]	train-mlogloss:0.346502	test-mlogloss:0.504229
[1030]	train-mlogloss:0.345224	test-mlogloss:0.504126
[1040]	train-mlogloss:0.343934	test-mlogloss:0.504113
[1050]	train-mlogloss:0.34274	test-mlogloss:0.504059
[1060]	train-mlogloss:0.341443	test-mlogloss:0.503963
[1070]	train-mlogloss:0.340282	test-mlogloss:0.503928
[1080]	train-mlogloss:0.339084	test-mlogloss:0.503823
[1090]	train-mlogloss:0.337854	test-mlogloss:0.503796
[1100]	train-mlogloss:0.336574	test-mlogloss:0.503813
[1110]	train-mlogloss:0.335512	test-mlogloss:0.503744
[1120]	train-mlogloss:0.334375	test-mlogloss:0.503704
[1130]	train-mlogloss:0.333203	test-mlogloss:0.503658
[1140]	train-mlogloss:0.332019	test-mlogloss:0.503675
[1150]	train-mlogloss:0.330797	test-mlogloss:0.503687
[1160]	train-mlogloss:0.329572	test-mlogloss:0.503707
[1170]	train-mlogloss:0.328338	test-mlogloss:0.503718
Stopping. Best iteration:
[1128]	train-mlogloss:0.333446	test-mlogloss:0.503646

[0.50231927851655944, 0.49766997270751701, 0.5099642237087797, 0.50648552464250396, 0.50364610942838239]
0.504017044348

In [14]:
# Convert the classifier run (rv3) into stacker-ready frames and persist them.
dfs3 = run3_to_stackdf(rv3)
# BUG FIX: the original passed a bare open(...) to pickle.dump, leaking the
# file handle; use a context manager so the file is flushed and closed.
with open('modeloutput-xgb-clf-r2.pkl', 'wb') as f:
    pickle.dump(dfs3, f)

In [15]:
def run_to_stackdf(run):
    """Reshape a CV run tuple into frames suitable for model stacking.

    `run` is the (models, df_cv, apreds) tuple produced by a run_cv*
    function: run[1] holds out-of-fold predictions with 'level' and
    'listing_id' columns, and run[2] is an array of per-fold test-set
    predictions.

    NOTE(review): relies on the notebook globals `cv_test` and `test_df`
    for the test-set listing ids — confirm they are in scope when called.

    Returns
    -------
    (df_allpreds, df_fold) : a listing_id-indexed frame combining the
    out-of-fold predictions with the mean test prediction, and a list of
    one listing_id-indexed frame per fold's test predictions.
    """
    # Average the per-fold test predictions and tag them with listing ids.
    test_mean = pd.DataFrame(run[2].mean(axis=0))
    test_mean.columns = ['level']
    test_mean['listing_id'] = cv_test[0].listing_id

    # Stack out-of-fold train predictions on top of the averaged test ones.
    df_allpreds = pd.concat([run[1][['level', 'listing_id']], test_mean])
    df_allpreds = df_allpreds.sort_values('listing_id').set_index('listing_id')

    # One listing_id-indexed frame per fold of raw test predictions.
    df_fold = []
    for fold_preds in run[2]:
        fold_df = pd.DataFrame(fold_preds)
        fold_df['listing_id'] = test_df.listing_id
        df_fold.append(fold_df.sort_values('listing_id').set_index('listing_id'))

    return (df_allpreds, df_fold)

In [16]:
def runXGB1(train_X, train_y, test_X, test_y=None, feature_names=None, seed_val=0, num_rounds=4000):
    """Train an XGBoost booster with the 'reg:logistic' objective and predict on test_X.

    Parameters
    ----------
    train_X, train_y : training features and regression targets (targets in [0, 1]).
    test_X : features to predict on.
    test_y : optional targets for test_X; when given, (test_X, test_y) is used as
        an early-stopping watchlist, otherwise training runs the full num_rounds.
    feature_names : unused; kept for signature compatibility with callers.
    seed_val : random seed passed to xgboost.
    num_rounds : maximum number of boosting rounds.

    Returns
    -------
    (pred_test_y, model) : predictions for test_X and the trained booster.
    """
    param = {}
    param['objective'] = 'reg:logistic'
    #param['tree_method'] = 'hist'
    param['eta'] = 0.02
    param['max_depth'] = 6
    param['silent'] = 1
    param['num_class'] = 1
    param['eval_metric'] = "rmse"
    param['min_child_weight'] = 1
    param['subsample'] = 0.7
    param['colsample_bytree'] = 0.7
    param['seed'] = seed_val
    # Start boosting from the target mean instead of the 0.5 default.
    param['base_score'] = train_y.mean()

    plst = list(param.items())
    xgtrain = xgb.DMatrix(train_X, label=train_y)

    if test_y is not None:
        xgtest = xgb.DMatrix(test_X, label=test_y)
        watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
        model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=50, verbose_eval=10)
    else:
        xgtest = xgb.DMatrix(test_X)
        model = xgb.train(plst, xgtrain, num_rounds)

    # BUG FIX: best_ntree_limit is only set when early stopping was used;
    # in the no-watchlist branch above it does not exist and the original
    # code raised AttributeError. Fall back to 0 (= use all trees).
    pred_test_y = model.predict(xgtest, ntree_limit=getattr(model, 'best_ntree_limit', 0))
    return pred_test_y, model

In [17]:
# Regression target assigned to the 'medium' class, between low=0 and high=1.
medium_regression_tgt = (.5 + (9/13)) / 2

def run_cv1(train_df, cv_test, kf, features_to_use):
    """Cross-validated single-target regression over the interest level.

    Maps the 3-class interest level onto one regression target
    (0 for class 0, medium_regression_tgt for class 1, 1 for class 2),
    trains one XGBoost model per CV fold via runXGB1, and collects
    out-of-fold and per-fold test-set predictions.

    Parameters
    ----------
    train_df : training frame; must contain 'interest_level' and 'listing_id'.
    cv_test : sequence of per-fold test frames to predict on (one per fold).
    kf : sklearn-style splitter providing .split(X, y).
    features_to_use : list of feature column names.

    Returns
    -------
    (models, df_cv, apreds) : fold models, concatenated out-of-fold
    prediction frame, and an array of per-fold test predictions.
    """
    train_X = train_df[features_to_use] #sparse.hstack([train_df[features_to_use], tr_sparse]).tocsr()
    # target_num_map is a notebook global mapping interest_level strings to
    # {0, 1, 2} -- presumably low/medium/high; confirm against its definition.
    train_y3 = np.array(train_df['interest_level'].apply(lambda x: target_num_map[x]))

    # Collapse the three classes onto a [0, 1] regression target.
    train_y = np.zeros_like(train_y3, dtype=np.float32)
    train_y[train_y3 == 1] = medium_regression_tgt
    train_y[train_y3 == 2] = 1

    cv_preds = []
    cv_scores = []
    models = []
    test_preds = []

    # BUG FIX: the original initialized fold = 0 but never incremented it,
    # so every iteration predicted on cv_test[0]; enumerate advances it.
    for fold, (dev_index, val_index) in enumerate(kf.split(range(train_X.shape[0]), train_y)):

        dev_X, val_X = train_X.iloc[dev_index], train_X.iloc[val_index]
        dev_y, val_y = train_y[dev_index], train_y[val_index]
        preds, model = runXGB1(dev_X, dev_y, val_X, val_y)
        models.append(model)

        cv_scores.append(model.best_score)
        print(cv_scores)

        cut_df = train_df.iloc[val_index]

        # Out-of-fold predictions tagged with listing ids and the target.
        out_df = pd.DataFrame(preds)
        out_df.columns = ["level"]
        out_df["listing_id"] = cut_df.listing_id.values
        out_df['interest_tgt'] = val_y # cut_df.interest.values

        cv_preds.append(out_df)

        # Predict on this fold's own test frame (per-fold feature encoding).
        xgtest = xgb.DMatrix(cv_test[fold][features_to_use])
        test_preds.append(model.predict(xgtest, ntree_limit=model.best_ntree_limit))

    df_cv = pd.concat(cv_preds)
    # Overall out-of-fold RMSE against the collapsed regression target.
    print(np.sqrt(sklearn.metrics.mean_squared_error(df_cv.interest_tgt, df_cv.level)))

    apreds = np.array(test_preds)

    return models, df_cv, apreds

In [18]:
# Run the regression CV; `fl` is a notebook-global feature list -- TODO
# confirm it matches the features_to_use list loaded earlier.
rv1 = run_cv1(train_df, cv_test, kf, fl)


[0]	train-rmse:0.334233	test-rmse:0.334264
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.314012	test-rmse:0.314653
[20]	train-rmse:0.298041	test-rmse:0.299375
[30]	train-rmse:0.285641	test-rmse:0.287604
[40]	train-rmse:0.275962	test-rmse:0.278544
[50]	train-rmse:0.268646	test-rmse:0.27187
[60]	train-rmse:0.262838	test-rmse:0.26663
[70]	train-rmse:0.258499	test-rmse:0.262806
[80]	train-rmse:0.254907	test-rmse:0.259728
[90]	train-rmse:0.251964	test-rmse:0.257245
[100]	train-rmse:0.249505	test-rmse:0.255299
[110]	train-rmse:0.247396	test-rmse:0.253605
[120]	train-rmse:0.245677	test-rmse:0.252278
[130]	train-rmse:0.244128	test-rmse:0.251089
[140]	train-rmse:0.242778	test-rmse:0.250065
[150]	train-rmse:0.241542	test-rmse:0.249207
[160]	train-rmse:0.240455	test-rmse:0.248543
[170]	train-rmse:0.239464	test-rmse:0.247846
[180]	train-rmse:0.238531	test-rmse:0.247266
[190]	train-rmse:0.237702	test-rmse:0.246759
[200]	train-rmse:0.236784	test-rmse:0.246259
[210]	train-rmse:0.235908	test-rmse:0.24583
[220]	train-rmse:0.235131	test-rmse:0.245453
[230]	train-rmse:0.234462	test-rmse:0.245093
[240]	train-rmse:0.233854	test-rmse:0.244763
[250]	train-rmse:0.233231	test-rmse:0.244458
[260]	train-rmse:0.23261	test-rmse:0.244196
[270]	train-rmse:0.232083	test-rmse:0.243964
[280]	train-rmse:0.231555	test-rmse:0.243665
[290]	train-rmse:0.230909	test-rmse:0.243473
[300]	train-rmse:0.230425	test-rmse:0.243253
[310]	train-rmse:0.22994	test-rmse:0.243054
[320]	train-rmse:0.229343	test-rmse:0.242865
[330]	train-rmse:0.228855	test-rmse:0.242692
[340]	train-rmse:0.228492	test-rmse:0.242553
[350]	train-rmse:0.227986	test-rmse:0.242406
[360]	train-rmse:0.22745	test-rmse:0.242221
[370]	train-rmse:0.226923	test-rmse:0.242043
[380]	train-rmse:0.226426	test-rmse:0.241914
[390]	train-rmse:0.226024	test-rmse:0.24181
[400]	train-rmse:0.225432	test-rmse:0.241647
[410]	train-rmse:0.224855	test-rmse:0.241483
[420]	train-rmse:0.22429	test-rmse:0.241382
[430]	train-rmse:0.223848	test-rmse:0.241254
[440]	train-rmse:0.223427	test-rmse:0.241125
[450]	train-rmse:0.222934	test-rmse:0.240989
[460]	train-rmse:0.222489	test-rmse:0.240886
[470]	train-rmse:0.221944	test-rmse:0.240768
[480]	train-rmse:0.221483	test-rmse:0.240672
[490]	train-rmse:0.221089	test-rmse:0.240528
[500]	train-rmse:0.220645	test-rmse:0.240443
[510]	train-rmse:0.220311	test-rmse:0.240416
[520]	train-rmse:0.219752	test-rmse:0.240345
[530]	train-rmse:0.219321	test-rmse:0.24027
[540]	train-rmse:0.218855	test-rmse:0.240197
[550]	train-rmse:0.218269	test-rmse:0.240145
[560]	train-rmse:0.217705	test-rmse:0.240061
[570]	train-rmse:0.217317	test-rmse:0.240014
[580]	train-rmse:0.216752	test-rmse:0.239903
[590]	train-rmse:0.216381	test-rmse:0.239835
[600]	train-rmse:0.215899	test-rmse:0.239772
[610]	train-rmse:0.215545	test-rmse:0.239722
[620]	train-rmse:0.215098	test-rmse:0.239646
[630]	train-rmse:0.214699	test-rmse:0.239586
[640]	train-rmse:0.214299	test-rmse:0.239493
[650]	train-rmse:0.213847	test-rmse:0.239457
[660]	train-rmse:0.213376	test-rmse:0.239434
[670]	train-rmse:0.212935	test-rmse:0.239386
[680]	train-rmse:0.21257	test-rmse:0.239323
[690]	train-rmse:0.212101	test-rmse:0.23929
[700]	train-rmse:0.211616	test-rmse:0.2392
[710]	train-rmse:0.21128	test-rmse:0.239163
[720]	train-rmse:0.210823	test-rmse:0.239117
[730]	train-rmse:0.2104	test-rmse:0.23905
[740]	train-rmse:0.209955	test-rmse:0.238969
[750]	train-rmse:0.209529	test-rmse:0.238917
[760]	train-rmse:0.209099	test-rmse:0.238877
[770]	train-rmse:0.20865	test-rmse:0.23882
[780]	train-rmse:0.208293	test-rmse:0.238789
[790]	train-rmse:0.207822	test-rmse:0.238711
[800]	train-rmse:0.207506	test-rmse:0.238657
[810]	train-rmse:0.207126	test-rmse:0.238652
[820]	train-rmse:0.206701	test-rmse:0.238632
[830]	train-rmse:0.206261	test-rmse:0.23859
[840]	train-rmse:0.205852	test-rmse:0.238579
[850]	train-rmse:0.205446	test-rmse:0.238589
[860]	train-rmse:0.205028	test-rmse:0.238548
[870]	train-rmse:0.204652	test-rmse:0.238524
[880]	train-rmse:0.204244	test-rmse:0.238462
[890]	train-rmse:0.203967	test-rmse:0.238446
[900]	train-rmse:0.203478	test-rmse:0.238393
[910]	train-rmse:0.203124	test-rmse:0.238364
[920]	train-rmse:0.202743	test-rmse:0.238333
[930]	train-rmse:0.20228	test-rmse:0.238299
[940]	train-rmse:0.201933	test-rmse:0.238286
[950]	train-rmse:0.20149	test-rmse:0.238266
[960]	train-rmse:0.201056	test-rmse:0.238251
[970]	train-rmse:0.200724	test-rmse:0.238244
[980]	train-rmse:0.200276	test-rmse:0.238277
[990]	train-rmse:0.199802	test-rmse:0.238251
[1000]	train-rmse:0.199409	test-rmse:0.238218
[1010]	train-rmse:0.199095	test-rmse:0.238182
[1020]	train-rmse:0.198726	test-rmse:0.238174
[1030]	train-rmse:0.198415	test-rmse:0.238151
[1040]	train-rmse:0.198099	test-rmse:0.23811
[1050]	train-rmse:0.19771	test-rmse:0.238082
[1060]	train-rmse:0.197313	test-rmse:0.238053
[1070]	train-rmse:0.196985	test-rmse:0.238035
[1080]	train-rmse:0.196588	test-rmse:0.237977
[1090]	train-rmse:0.196283	test-rmse:0.237991
[1100]	train-rmse:0.195856	test-rmse:0.237978
[1110]	train-rmse:0.195496	test-rmse:0.23792
[1120]	train-rmse:0.195102	test-rmse:0.237923
[1130]	train-rmse:0.194697	test-rmse:0.237918
[1140]	train-rmse:0.194325	test-rmse:0.237919
[1150]	train-rmse:0.193948	test-rmse:0.237883
[1160]	train-rmse:0.193557	test-rmse:0.23787
[1170]	train-rmse:0.193156	test-rmse:0.237819
[1180]	train-rmse:0.192772	test-rmse:0.237783
[1190]	train-rmse:0.192395	test-rmse:0.237775
[1200]	train-rmse:0.191983	test-rmse:0.237735
[1210]	train-rmse:0.191628	test-rmse:0.237723
[1220]	train-rmse:0.1913	test-rmse:0.237704
[1230]	train-rmse:0.190886	test-rmse:0.23769
[1240]	train-rmse:0.190491	test-rmse:0.237648
[1250]	train-rmse:0.190118	test-rmse:0.237631
[1260]	train-rmse:0.189807	test-rmse:0.237623
[1270]	train-rmse:0.189374	test-rmse:0.237621
[1280]	train-rmse:0.189034	test-rmse:0.237573
[1290]	train-rmse:0.188687	test-rmse:0.237553
[1300]	train-rmse:0.188328	test-rmse:0.23752
[1310]	train-rmse:0.187944	test-rmse:0.237524
[1320]	train-rmse:0.18759	test-rmse:0.237499
[1330]	train-rmse:0.187232	test-rmse:0.237499
[1340]	train-rmse:0.186852	test-rmse:0.237487
[1350]	train-rmse:0.18655	test-rmse:0.237485
[1360]	train-rmse:0.186206	test-rmse:0.237498
[1370]	train-rmse:0.185813	test-rmse:0.237488
[1380]	train-rmse:0.185409	test-rmse:0.23751
[1390]	train-rmse:0.185028	test-rmse:0.23749
[1400]	train-rmse:0.184726	test-rmse:0.23747
[1410]	train-rmse:0.184387	test-rmse:0.237464
[1420]	train-rmse:0.184027	test-rmse:0.237457
[1430]	train-rmse:0.183658	test-rmse:0.237445
[1440]	train-rmse:0.183328	test-rmse:0.237449
[1450]	train-rmse:0.183	test-rmse:0.237411
[1460]	train-rmse:0.182675	test-rmse:0.237413
[1470]	train-rmse:0.182375	test-rmse:0.23739
[1480]	train-rmse:0.182008	test-rmse:0.237402
[1490]	train-rmse:0.181676	test-rmse:0.237406
[1500]	train-rmse:0.181379	test-rmse:0.237405
[1510]	train-rmse:0.181114	test-rmse:0.23738
[1520]	train-rmse:0.180806	test-rmse:0.237349
[1530]	train-rmse:0.180455	test-rmse:0.237374
[1540]	train-rmse:0.180107	test-rmse:0.237366
[1550]	train-rmse:0.179854	test-rmse:0.237372
[1560]	train-rmse:0.179525	test-rmse:0.237353
[1570]	train-rmse:0.179166	test-rmse:0.237361
[1580]	train-rmse:0.178845	test-rmse:0.237331
[1590]	train-rmse:0.178518	test-rmse:0.237296
[1600]	train-rmse:0.178245	test-rmse:0.237274
[1610]	train-rmse:0.177913	test-rmse:0.23725
[1620]	train-rmse:0.177645	test-rmse:0.237214
[1630]	train-rmse:0.177377	test-rmse:0.237203
[1640]	train-rmse:0.17712	test-rmse:0.237211
[1650]	train-rmse:0.176779	test-rmse:0.237205
[1660]	train-rmse:0.176445	test-rmse:0.237192
[1670]	train-rmse:0.176067	test-rmse:0.237167
[1680]	train-rmse:0.175757	test-rmse:0.237144
[1690]	train-rmse:0.175455	test-rmse:0.23714
[1700]	train-rmse:0.175116	test-rmse:0.237155
[1710]	train-rmse:0.174834	test-rmse:0.237139
[1720]	train-rmse:0.17453	test-rmse:0.23712
[1730]	train-rmse:0.174263	test-rmse:0.237133
[1740]	train-rmse:0.173905	test-rmse:0.237113
[1750]	train-rmse:0.173612	test-rmse:0.23712
[1760]	train-rmse:0.173283	test-rmse:0.237126
[1770]	train-rmse:0.172928	test-rmse:0.237119
[1780]	train-rmse:0.172637	test-rmse:0.237109
[1790]	train-rmse:0.17235	test-rmse:0.237122
[1800]	train-rmse:0.171931	test-rmse:0.237121
[1810]	train-rmse:0.171647	test-rmse:0.237117
[1820]	train-rmse:0.171315	test-rmse:0.237091
[1830]	train-rmse:0.171059	test-rmse:0.237098
[1840]	train-rmse:0.170676	test-rmse:0.237112
[1850]	train-rmse:0.170377	test-rmse:0.237092
[1860]	train-rmse:0.169992	test-rmse:0.237073
[1870]	train-rmse:0.169709	test-rmse:0.237039
[1880]	train-rmse:0.169413	test-rmse:0.237044
[1890]	train-rmse:0.16915	test-rmse:0.237021
[1900]	train-rmse:0.168791	test-rmse:0.236989
[1910]	train-rmse:0.168474	test-rmse:0.236955
[1920]	train-rmse:0.168183	test-rmse:0.236972
[1930]	train-rmse:0.167964	test-rmse:0.236955
[1940]	train-rmse:0.167628	test-rmse:0.236923
[1950]	train-rmse:0.167316	test-rmse:0.236951
[1960]	train-rmse:0.166973	test-rmse:0.236955
[1970]	train-rmse:0.166656	test-rmse:0.236956
[1980]	train-rmse:0.16638	test-rmse:0.236967
[1990]	train-rmse:0.166086	test-rmse:0.236955
Stopping. Best iteration:
[1941]	train-rmse:0.167584	test-rmse:0.236917

[0.236917]
[0]	train-rmse:0.334184	test-rmse:0.334238
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.314015	test-rmse:0.314393
[20]	train-rmse:0.298107	test-rmse:0.298861
[30]	train-rmse:0.285832	test-rmse:0.287077
[40]	train-rmse:0.27621	test-rmse:0.277946
[50]	train-rmse:0.268934	test-rmse:0.271077
[60]	train-rmse:0.263256	test-rmse:0.265902
[70]	train-rmse:0.258898	test-rmse:0.261936
[80]	train-rmse:0.255268	test-rmse:0.258771
[90]	train-rmse:0.25233	test-rmse:0.256298
[100]	train-rmse:0.249881	test-rmse:0.254297
[110]	train-rmse:0.247855	test-rmse:0.25266
[120]	train-rmse:0.245988	test-rmse:0.251295
[130]	train-rmse:0.24433	test-rmse:0.250095
[140]	train-rmse:0.242932	test-rmse:0.249067
[150]	train-rmse:0.241769	test-rmse:0.248284
[160]	train-rmse:0.240664	test-rmse:0.247489
[170]	train-rmse:0.2397	test-rmse:0.246856
[180]	train-rmse:0.238754	test-rmse:0.246291
[190]	train-rmse:0.237718	test-rmse:0.245707
[200]	train-rmse:0.236778	test-rmse:0.245219
[210]	train-rmse:0.235981	test-rmse:0.244786
[220]	train-rmse:0.235201	test-rmse:0.24437
[230]	train-rmse:0.234397	test-rmse:0.244046
[240]	train-rmse:0.233706	test-rmse:0.243718
[250]	train-rmse:0.233044	test-rmse:0.243418
[260]	train-rmse:0.232418	test-rmse:0.243181
[270]	train-rmse:0.231958	test-rmse:0.242946
[280]	train-rmse:0.231526	test-rmse:0.242746
[290]	train-rmse:0.230937	test-rmse:0.242534
[300]	train-rmse:0.230442	test-rmse:0.24236
[310]	train-rmse:0.229842	test-rmse:0.242147
[320]	train-rmse:0.229281	test-rmse:0.242004
[330]	train-rmse:0.228775	test-rmse:0.24182
[340]	train-rmse:0.228274	test-rmse:0.241671
[350]	train-rmse:0.227668	test-rmse:0.241502
[360]	train-rmse:0.227203	test-rmse:0.24141
[370]	train-rmse:0.22664	test-rmse:0.241309
[380]	train-rmse:0.226218	test-rmse:0.241193
[390]	train-rmse:0.225723	test-rmse:0.241101
[400]	train-rmse:0.22533	test-rmse:0.240988
[410]	train-rmse:0.224732	test-rmse:0.240869
[420]	train-rmse:0.224203	test-rmse:0.240801
[430]	train-rmse:0.223614	test-rmse:0.240703
[440]	train-rmse:0.2231	test-rmse:0.240614
[450]	train-rmse:0.222677	test-rmse:0.240509
[460]	train-rmse:0.222234	test-rmse:0.240423
[470]	train-rmse:0.22164	test-rmse:0.240349
[480]	train-rmse:0.221155	test-rmse:0.240245
[490]	train-rmse:0.220672	test-rmse:0.240181
[500]	train-rmse:0.220094	test-rmse:0.240064
[510]	train-rmse:0.219663	test-rmse:0.240029
[520]	train-rmse:0.21922	test-rmse:0.239952
[530]	train-rmse:0.218783	test-rmse:0.239908
[540]	train-rmse:0.218281	test-rmse:0.239847
[550]	train-rmse:0.217801	test-rmse:0.239797
[560]	train-rmse:0.217358	test-rmse:0.239747
[570]	train-rmse:0.216915	test-rmse:0.2397
[580]	train-rmse:0.216619	test-rmse:0.239689
[590]	train-rmse:0.216102	test-rmse:0.239621
[600]	train-rmse:0.215654	test-rmse:0.239598
[610]	train-rmse:0.215218	test-rmse:0.239517
[620]	train-rmse:0.2148	test-rmse:0.239461
[630]	train-rmse:0.214447	test-rmse:0.239413
[640]	train-rmse:0.213951	test-rmse:0.239374
[650]	train-rmse:0.21346	test-rmse:0.239346
[660]	train-rmse:0.212991	test-rmse:0.239288
[670]	train-rmse:0.21257	test-rmse:0.23922
[680]	train-rmse:0.212035	test-rmse:0.239131
[690]	train-rmse:0.211556	test-rmse:0.23911
[700]	train-rmse:0.211132	test-rmse:0.239045
[710]	train-rmse:0.210781	test-rmse:0.23901
[720]	train-rmse:0.210356	test-rmse:0.238977
[730]	train-rmse:0.209969	test-rmse:0.238947
[740]	train-rmse:0.209516	test-rmse:0.238888
[750]	train-rmse:0.209099	test-rmse:0.238839
[760]	train-rmse:0.208756	test-rmse:0.238809
[770]	train-rmse:0.208323	test-rmse:0.238749
[780]	train-rmse:0.207921	test-rmse:0.238737
[790]	train-rmse:0.207541	test-rmse:0.238707
[800]	train-rmse:0.207147	test-rmse:0.238708
[810]	train-rmse:0.206685	test-rmse:0.238664
[820]	train-rmse:0.206319	test-rmse:0.238626
[830]	train-rmse:0.20588	test-rmse:0.238567
[840]	train-rmse:0.205486	test-rmse:0.238556
[850]	train-rmse:0.205072	test-rmse:0.238495
[860]	train-rmse:0.204668	test-rmse:0.238495
[870]	train-rmse:0.204147	test-rmse:0.238466
[880]	train-rmse:0.203759	test-rmse:0.238438
[890]	train-rmse:0.203383	test-rmse:0.238457
[900]	train-rmse:0.202942	test-rmse:0.238418
[910]	train-rmse:0.202559	test-rmse:0.238439
[920]	train-rmse:0.202109	test-rmse:0.238419
[930]	train-rmse:0.201662	test-rmse:0.238394
[940]	train-rmse:0.20124	test-rmse:0.238347
[950]	train-rmse:0.20075	test-rmse:0.238273
[960]	train-rmse:0.200377	test-rmse:0.238207
[970]	train-rmse:0.199931	test-rmse:0.238194
[980]	train-rmse:0.199536	test-rmse:0.23817
[990]	train-rmse:0.199121	test-rmse:0.238127
[1000]	train-rmse:0.198773	test-rmse:0.238098
[1010]	train-rmse:0.198432	test-rmse:0.238094
[1020]	train-rmse:0.198086	test-rmse:0.2381
[1030]	train-rmse:0.197676	test-rmse:0.238114
[1040]	train-rmse:0.197302	test-rmse:0.238107
[1050]	train-rmse:0.196956	test-rmse:0.238085
[1060]	train-rmse:0.196592	test-rmse:0.238106
[1070]	train-rmse:0.196281	test-rmse:0.23809
[1080]	train-rmse:0.195854	test-rmse:0.238087
[1090]	train-rmse:0.195516	test-rmse:0.238041
[1100]	train-rmse:0.195189	test-rmse:0.238032
[1110]	train-rmse:0.194821	test-rmse:0.238005
[1120]	train-rmse:0.194416	test-rmse:0.238008
[1130]	train-rmse:0.194082	test-rmse:0.237981
[1140]	train-rmse:0.193689	test-rmse:0.237963
[1150]	train-rmse:0.193251	test-rmse:0.237936
[1160]	train-rmse:0.192918	test-rmse:0.237911
[1170]	train-rmse:0.192614	test-rmse:0.237917
[1180]	train-rmse:0.192316	test-rmse:0.237933
[1190]	train-rmse:0.191991	test-rmse:0.237915
[1200]	train-rmse:0.191717	test-rmse:0.237927
[1210]	train-rmse:0.191356	test-rmse:0.237939
Stopping. Best iteration:
[1161]	train-rmse:0.192894	test-rmse:0.237903

[0.236917, 0.237903]
[0]	train-rmse:0.334063	test-rmse:0.334205
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.313525	test-rmse:0.315191
[20]	train-rmse:0.297398	test-rmse:0.300585
[30]	train-rmse:0.284925	test-rmse:0.289381
[40]	train-rmse:0.275164	test-rmse:0.280754
[50]	train-rmse:0.267825	test-rmse:0.2744
[60]	train-rmse:0.262022	test-rmse:0.26952
[70]	train-rmse:0.257595	test-rmse:0.265922
[80]	train-rmse:0.253955	test-rmse:0.262993
[90]	train-rmse:0.250952	test-rmse:0.26066
[100]	train-rmse:0.248557	test-rmse:0.258846
[110]	train-rmse:0.246562	test-rmse:0.257325
[120]	train-rmse:0.244837	test-rmse:0.256149
[130]	train-rmse:0.243289	test-rmse:0.254997
[140]	train-rmse:0.241908	test-rmse:0.254044
[150]	train-rmse:0.240748	test-rmse:0.253272
[160]	train-rmse:0.239621	test-rmse:0.252581
[170]	train-rmse:0.238604	test-rmse:0.251937
[180]	train-rmse:0.237642	test-rmse:0.251372
[190]	train-rmse:0.236721	test-rmse:0.250821
[200]	train-rmse:0.235851	test-rmse:0.25037
[210]	train-rmse:0.235195	test-rmse:0.24999
[220]	train-rmse:0.234573	test-rmse:0.249585
[230]	train-rmse:0.233825	test-rmse:0.249217
[240]	train-rmse:0.233037	test-rmse:0.248826
[250]	train-rmse:0.232471	test-rmse:0.248556
[260]	train-rmse:0.231764	test-rmse:0.248267
[270]	train-rmse:0.23112	test-rmse:0.247993
[280]	train-rmse:0.230509	test-rmse:0.247719
[290]	train-rmse:0.229985	test-rmse:0.247495
[300]	train-rmse:0.229403	test-rmse:0.247231
[310]	train-rmse:0.229059	test-rmse:0.247079
[320]	train-rmse:0.228585	test-rmse:0.246928
[330]	train-rmse:0.228095	test-rmse:0.246728
[340]	train-rmse:0.227678	test-rmse:0.246566
[350]	train-rmse:0.227159	test-rmse:0.246462
[360]	train-rmse:0.226723	test-rmse:0.246322
[370]	train-rmse:0.226161	test-rmse:0.246169
[380]	train-rmse:0.225706	test-rmse:0.24605
[390]	train-rmse:0.225132	test-rmse:0.245885
[400]	train-rmse:0.224592	test-rmse:0.245742
[410]	train-rmse:0.224043	test-rmse:0.245613
[420]	train-rmse:0.223501	test-rmse:0.245463
[430]	train-rmse:0.222934	test-rmse:0.245284
[440]	train-rmse:0.222426	test-rmse:0.245123
[450]	train-rmse:0.22199	test-rmse:0.245041
[460]	train-rmse:0.221529	test-rmse:0.24491
[470]	train-rmse:0.220931	test-rmse:0.244827
[480]	train-rmse:0.220465	test-rmse:0.244727
[490]	train-rmse:0.220009	test-rmse:0.244577
[500]	train-rmse:0.219582	test-rmse:0.244533
[510]	train-rmse:0.219119	test-rmse:0.244477
[520]	train-rmse:0.218762	test-rmse:0.24438
[530]	train-rmse:0.218267	test-rmse:0.244257
[540]	train-rmse:0.217807	test-rmse:0.244189
[550]	train-rmse:0.21729	test-rmse:0.244056
[560]	train-rmse:0.216892	test-rmse:0.243971
[570]	train-rmse:0.216512	test-rmse:0.243893
[580]	train-rmse:0.216008	test-rmse:0.243847
[590]	train-rmse:0.215599	test-rmse:0.243797
[600]	train-rmse:0.215197	test-rmse:0.243688
[610]	train-rmse:0.214729	test-rmse:0.243622
[620]	train-rmse:0.214287	test-rmse:0.2436
[630]	train-rmse:0.21375	test-rmse:0.24352
[640]	train-rmse:0.213376	test-rmse:0.243462
[650]	train-rmse:0.212914	test-rmse:0.243358
[660]	train-rmse:0.212526	test-rmse:0.243306
[670]	train-rmse:0.2121	test-rmse:0.243273
[680]	train-rmse:0.211753	test-rmse:0.243218
[690]	train-rmse:0.211196	test-rmse:0.243158
[700]	train-rmse:0.210818	test-rmse:0.243108
[710]	train-rmse:0.21031	test-rmse:0.243034
[720]	train-rmse:0.209883	test-rmse:0.242965
[730]	train-rmse:0.209518	test-rmse:0.242922
[740]	train-rmse:0.209098	test-rmse:0.242887
[750]	train-rmse:0.208743	test-rmse:0.242847
[760]	train-rmse:0.20831	test-rmse:0.242761
[770]	train-rmse:0.207914	test-rmse:0.242716
[780]	train-rmse:0.207449	test-rmse:0.242661
[790]	train-rmse:0.207034	test-rmse:0.242613
[800]	train-rmse:0.206615	test-rmse:0.242582
[810]	train-rmse:0.206155	test-rmse:0.242569
[820]	train-rmse:0.205766	test-rmse:0.242545
[830]	train-rmse:0.205385	test-rmse:0.242542
[840]	train-rmse:0.204952	test-rmse:0.242495
[850]	train-rmse:0.204579	test-rmse:0.242442
[860]	train-rmse:0.204238	test-rmse:0.24241
[870]	train-rmse:0.203852	test-rmse:0.242421
[880]	train-rmse:0.203499	test-rmse:0.242415
[890]	train-rmse:0.203002	test-rmse:0.242345
[900]	train-rmse:0.202587	test-rmse:0.242283
[910]	train-rmse:0.202203	test-rmse:0.242266
[920]	train-rmse:0.201797	test-rmse:0.242268
[930]	train-rmse:0.201392	test-rmse:0.242245
[940]	train-rmse:0.201078	test-rmse:0.242223
[950]	train-rmse:0.200636	test-rmse:0.242214
[960]	train-rmse:0.200269	test-rmse:0.242189
[970]	train-rmse:0.199849	test-rmse:0.242138
[980]	train-rmse:0.199404	test-rmse:0.242085
[990]	train-rmse:0.19899	test-rmse:0.242075
[1000]	train-rmse:0.198611	test-rmse:0.242071
[1010]	train-rmse:0.198285	test-rmse:0.242041
[1020]	train-rmse:0.19793	test-rmse:0.242014
[1030]	train-rmse:0.197524	test-rmse:0.242008
[1040]	train-rmse:0.197101	test-rmse:0.241996
[1050]	train-rmse:0.196764	test-rmse:0.241975
[1060]	train-rmse:0.196369	test-rmse:0.241941
[1070]	train-rmse:0.196033	test-rmse:0.24191
[1080]	train-rmse:0.195688	test-rmse:0.241894
[1090]	train-rmse:0.195353	test-rmse:0.24191
[1100]	train-rmse:0.195059	test-rmse:0.241877
[1110]	train-rmse:0.194712	test-rmse:0.24189
[1120]	train-rmse:0.194296	test-rmse:0.241908
[1130]	train-rmse:0.193944	test-rmse:0.241876
[1140]	train-rmse:0.193627	test-rmse:0.241866
[1150]	train-rmse:0.193192	test-rmse:0.241824
[1160]	train-rmse:0.192791	test-rmse:0.241765
[1170]	train-rmse:0.192392	test-rmse:0.24177
[1180]	train-rmse:0.192046	test-rmse:0.241788
[1190]	train-rmse:0.19164	test-rmse:0.241733
[1200]	train-rmse:0.19121	test-rmse:0.24177
[1210]	train-rmse:0.190831	test-rmse:0.241776
[1220]	train-rmse:0.190466	test-rmse:0.241733
[1230]	train-rmse:0.190158	test-rmse:0.241718
[1240]	train-rmse:0.189785	test-rmse:0.241694
[1250]	train-rmse:0.189386	test-rmse:0.241653
[1260]	train-rmse:0.189065	test-rmse:0.241622
[1270]	train-rmse:0.188707	test-rmse:0.2416
[1280]	train-rmse:0.188393	test-rmse:0.241598
[1290]	train-rmse:0.188062	test-rmse:0.241624
[1300]	train-rmse:0.187737	test-rmse:0.241574
[1310]	train-rmse:0.187374	test-rmse:0.241539
[1320]	train-rmse:0.187057	test-rmse:0.241524
[1330]	train-rmse:0.18679	test-rmse:0.241496
[1340]	train-rmse:0.186368	test-rmse:0.241454
[1350]	train-rmse:0.18603	test-rmse:0.241418
[1360]	train-rmse:0.185676	test-rmse:0.241396
[1370]	train-rmse:0.185279	test-rmse:0.241362
[1380]	train-rmse:0.184928	test-rmse:0.241337
[1390]	train-rmse:0.184553	test-rmse:0.241352
[1400]	train-rmse:0.184219	test-rmse:0.241357
[1410]	train-rmse:0.183919	test-rmse:0.241342
[1420]	train-rmse:0.183587	test-rmse:0.241329
[1430]	train-rmse:0.183225	test-rmse:0.241306
[1440]	train-rmse:0.182893	test-rmse:0.241321
[1450]	train-rmse:0.182505	test-rmse:0.2413
[1460]	train-rmse:0.182212	test-rmse:0.241312
[1470]	train-rmse:0.181832	test-rmse:0.241267
[1480]	train-rmse:0.18149	test-rmse:0.241237
[1490]	train-rmse:0.181233	test-rmse:0.241228
[1500]	train-rmse:0.18091	test-rmse:0.24121
[1510]	train-rmse:0.180588	test-rmse:0.241214
[1520]	train-rmse:0.180233	test-rmse:0.241176
[1530]	train-rmse:0.179908	test-rmse:0.241156
[1540]	train-rmse:0.179541	test-rmse:0.24117
[1550]	train-rmse:0.179161	test-rmse:0.241143
[1560]	train-rmse:0.178862	test-rmse:0.241137
[1570]	train-rmse:0.178551	test-rmse:0.241136
[1580]	train-rmse:0.178269	test-rmse:0.241126
[1590]	train-rmse:0.177993	test-rmse:0.241145
[1600]	train-rmse:0.177657	test-rmse:0.241112
[1610]	train-rmse:0.177379	test-rmse:0.241116
[1620]	train-rmse:0.177043	test-rmse:0.241133
[1630]	train-rmse:0.176786	test-rmse:0.241105
[1640]	train-rmse:0.1765	test-rmse:0.241098
[1650]	train-rmse:0.176154	test-rmse:0.241119
[1660]	train-rmse:0.175793	test-rmse:0.241134
[1670]	train-rmse:0.175467	test-rmse:0.241099
[1680]	train-rmse:0.175183	test-rmse:0.241113
Stopping. Best iteration:
[1634]	train-rmse:0.176661	test-rmse:0.241082

[0.236917, 0.237903, 0.241082]
[0]	train-rmse:0.334174	test-rmse:0.334305
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.313902	test-rmse:0.314999
[20]	train-rmse:0.297838	test-rmse:0.299845
[30]	train-rmse:0.285369	test-rmse:0.288216
[40]	train-rmse:0.275706	test-rmse:0.279376
[50]	train-rmse:0.268343	test-rmse:0.272814
[60]	train-rmse:0.262616	test-rmse:0.267786
[70]	train-rmse:0.258222	test-rmse:0.264003
[80]	train-rmse:0.254555	test-rmse:0.260993
[90]	train-rmse:0.251595	test-rmse:0.258638
[100]	train-rmse:0.249068	test-rmse:0.256712
[110]	train-rmse:0.246978	test-rmse:0.255114
[120]	train-rmse:0.245168	test-rmse:0.253812
[130]	train-rmse:0.243527	test-rmse:0.252706
[140]	train-rmse:0.242164	test-rmse:0.251822
[150]	train-rmse:0.240996	test-rmse:0.251039
[160]	train-rmse:0.239942	test-rmse:0.250292
[170]	train-rmse:0.239006	test-rmse:0.249678
[180]	train-rmse:0.238119	test-rmse:0.249212
[190]	train-rmse:0.237242	test-rmse:0.248755
[200]	train-rmse:0.236347	test-rmse:0.24827
[210]	train-rmse:0.235594	test-rmse:0.247819
[220]	train-rmse:0.234902	test-rmse:0.247456
[230]	train-rmse:0.234307	test-rmse:0.247152
[240]	train-rmse:0.233557	test-rmse:0.246856
[250]	train-rmse:0.232824	test-rmse:0.246559
[260]	train-rmse:0.232262	test-rmse:0.246333
[270]	train-rmse:0.231668	test-rmse:0.246006
[280]	train-rmse:0.231081	test-rmse:0.245759
[290]	train-rmse:0.2306	test-rmse:0.245521
[300]	train-rmse:0.230032	test-rmse:0.245287
[310]	train-rmse:0.229422	test-rmse:0.245049
[320]	train-rmse:0.228894	test-rmse:0.244844
[330]	train-rmse:0.22844	test-rmse:0.244667
[340]	train-rmse:0.227889	test-rmse:0.244482
[350]	train-rmse:0.227376	test-rmse:0.244292
[360]	train-rmse:0.226825	test-rmse:0.244127
[370]	train-rmse:0.226391	test-rmse:0.243948
[380]	train-rmse:0.225768	test-rmse:0.243798
[390]	train-rmse:0.225398	test-rmse:0.243696
[400]	train-rmse:0.224841	test-rmse:0.243533
[410]	train-rmse:0.224471	test-rmse:0.243421
[420]	train-rmse:0.224018	test-rmse:0.243319
[430]	train-rmse:0.223474	test-rmse:0.243177
[440]	train-rmse:0.223017	test-rmse:0.243064
[450]	train-rmse:0.22253	test-rmse:0.242972
[460]	train-rmse:0.222012	test-rmse:0.242831
[470]	train-rmse:0.221533	test-rmse:0.242677
[480]	train-rmse:0.221113	test-rmse:0.242612
[490]	train-rmse:0.220716	test-rmse:0.242517
[500]	train-rmse:0.220324	test-rmse:0.242452
[510]	train-rmse:0.219827	test-rmse:0.242355
[520]	train-rmse:0.21942	test-rmse:0.242287
[530]	train-rmse:0.218875	test-rmse:0.242201
[540]	train-rmse:0.218416	test-rmse:0.242125
[550]	train-rmse:0.217923	test-rmse:0.242036
[560]	train-rmse:0.217414	test-rmse:0.241983
[570]	train-rmse:0.216867	test-rmse:0.241931
[580]	train-rmse:0.216476	test-rmse:0.241865
[590]	train-rmse:0.216038	test-rmse:0.241803
[600]	train-rmse:0.215578	test-rmse:0.241758
[610]	train-rmse:0.215206	test-rmse:0.241702
[620]	train-rmse:0.214775	test-rmse:0.241616
[630]	train-rmse:0.214351	test-rmse:0.241543
[640]	train-rmse:0.2139	test-rmse:0.24149
[650]	train-rmse:0.213579	test-rmse:0.241466
[660]	train-rmse:0.213067	test-rmse:0.241467
[670]	train-rmse:0.212669	test-rmse:0.241413
[680]	train-rmse:0.212174	test-rmse:0.241265
[690]	train-rmse:0.211663	test-rmse:0.241275
[700]	train-rmse:0.211179	test-rmse:0.241217
[710]	train-rmse:0.210764	test-rmse:0.241172
[720]	train-rmse:0.210291	test-rmse:0.241142
[730]	train-rmse:0.209852	test-rmse:0.241112
[740]	train-rmse:0.209428	test-rmse:0.241095
[750]	train-rmse:0.209036	test-rmse:0.241017
[760]	train-rmse:0.20858	test-rmse:0.240993
[770]	train-rmse:0.208189	test-rmse:0.240951
[780]	train-rmse:0.2078	test-rmse:0.240903
[790]	train-rmse:0.207366	test-rmse:0.240872
[800]	train-rmse:0.207039	test-rmse:0.240834
[810]	train-rmse:0.206656	test-rmse:0.240816
[820]	train-rmse:0.206255	test-rmse:0.240817
[830]	train-rmse:0.205857	test-rmse:0.240778
[840]	train-rmse:0.205482	test-rmse:0.240743
[850]	train-rmse:0.20514	test-rmse:0.240729
[860]	train-rmse:0.204796	test-rmse:0.240706
[870]	train-rmse:0.204356	test-rmse:0.240709
[880]	train-rmse:0.203959	test-rmse:0.240673
[890]	train-rmse:0.203482	test-rmse:0.240639
[900]	train-rmse:0.203066	test-rmse:0.240621
[910]	train-rmse:0.202656	test-rmse:0.240581
[920]	train-rmse:0.202166	test-rmse:0.240553
[930]	train-rmse:0.201792	test-rmse:0.240506
[940]	train-rmse:0.201464	test-rmse:0.240487
[950]	train-rmse:0.201108	test-rmse:0.240461
[960]	train-rmse:0.200743	test-rmse:0.240433
[970]	train-rmse:0.200381	test-rmse:0.240391
[980]	train-rmse:0.200037	test-rmse:0.240374
[990]	train-rmse:0.199645	test-rmse:0.240369
[1000]	train-rmse:0.199288	test-rmse:0.24033
[1010]	train-rmse:0.19893	test-rmse:0.240291
[1020]	train-rmse:0.198615	test-rmse:0.240279
[1030]	train-rmse:0.198251	test-rmse:0.240254
[1040]	train-rmse:0.197927	test-rmse:0.240234
[1050]	train-rmse:0.197566	test-rmse:0.24021
[1060]	train-rmse:0.197217	test-rmse:0.24016
[1070]	train-rmse:0.196809	test-rmse:0.240144
[1080]	train-rmse:0.196438	test-rmse:0.240119
[1090]	train-rmse:0.196058	test-rmse:0.240054
[1100]	train-rmse:0.195762	test-rmse:0.240034
[1110]	train-rmse:0.195379	test-rmse:0.240017
[1120]	train-rmse:0.19498	test-rmse:0.240014
[1130]	train-rmse:0.194639	test-rmse:0.240022
[1140]	train-rmse:0.194256	test-rmse:0.240022
[1150]	train-rmse:0.193868	test-rmse:0.240008
[1160]	train-rmse:0.193475	test-rmse:0.240016
[1170]	train-rmse:0.193095	test-rmse:0.239977
[1180]	train-rmse:0.19279	test-rmse:0.239969
[1190]	train-rmse:0.192485	test-rmse:0.239953
[1200]	train-rmse:0.192193	test-rmse:0.239942
[1210]	train-rmse:0.191774	test-rmse:0.239935
[1220]	train-rmse:0.191359	test-rmse:0.239914
[1230]	train-rmse:0.19103	test-rmse:0.239913
[1240]	train-rmse:0.19062	test-rmse:0.239908
[1250]	train-rmse:0.190279	test-rmse:0.239885
[1260]	train-rmse:0.189997	test-rmse:0.239871
[1270]	train-rmse:0.18961	test-rmse:0.239834
[1280]	train-rmse:0.189285	test-rmse:0.239792
[1290]	train-rmse:0.188924	test-rmse:0.239737
[1300]	train-rmse:0.188549	test-rmse:0.239731
[1310]	train-rmse:0.188163	test-rmse:0.239776
[1320]	train-rmse:0.187886	test-rmse:0.239746
[1330]	train-rmse:0.187599	test-rmse:0.239774
[1340]	train-rmse:0.187323	test-rmse:0.239768
Stopping. Best iteration:
[1298]	train-rmse:0.188641	test-rmse:0.239728

[0.236917, 0.237903, 0.241082, 0.239728]
[0]	train-rmse:0.334197	test-rmse:0.334181
Multiple eval metrics have been passed: 'test-rmse' will be used for early stopping.

Will train until test-rmse hasn't improved in 50 rounds.
[10]	train-rmse:0.3137	test-rmse:0.314467
[20]	train-rmse:0.297592	test-rmse:0.298971
[30]	train-rmse:0.285156	test-rmse:0.287158
[40]	train-rmse:0.275657	test-rmse:0.278319
[50]	train-rmse:0.268419	test-rmse:0.271616
[60]	train-rmse:0.26263	test-rmse:0.26647
[70]	train-rmse:0.258137	test-rmse:0.26244
[80]	train-rmse:0.254595	test-rmse:0.259391
[90]	train-rmse:0.251762	test-rmse:0.257018
[100]	train-rmse:0.249368	test-rmse:0.255084
[110]	train-rmse:0.247438	test-rmse:0.253542
[120]	train-rmse:0.245655	test-rmse:0.252226
[130]	train-rmse:0.244135	test-rmse:0.251043
[140]	train-rmse:0.242673	test-rmse:0.250014
[150]	train-rmse:0.24139	test-rmse:0.249142
[160]	train-rmse:0.240305	test-rmse:0.248431
[170]	train-rmse:0.239308	test-rmse:0.247741
[180]	train-rmse:0.238378	test-rmse:0.247156
[190]	train-rmse:0.237493	test-rmse:0.246591
[200]	train-rmse:0.236548	test-rmse:0.246042
[210]	train-rmse:0.235829	test-rmse:0.245645
[220]	train-rmse:0.23509	test-rmse:0.245283
[230]	train-rmse:0.234344	test-rmse:0.244922
[240]	train-rmse:0.233581	test-rmse:0.244592
[250]	train-rmse:0.232959	test-rmse:0.244311
[260]	train-rmse:0.232375	test-rmse:0.244017
[270]	train-rmse:0.23181	test-rmse:0.243784
[280]	train-rmse:0.231231	test-rmse:0.243544
[290]	train-rmse:0.230676	test-rmse:0.24338
[300]	train-rmse:0.230062	test-rmse:0.243159
[310]	train-rmse:0.229459	test-rmse:0.242955
[320]	train-rmse:0.228974	test-rmse:0.242775
[330]	train-rmse:0.228381	test-rmse:0.242591
[340]	train-rmse:0.227911	test-rmse:0.242392
[350]	train-rmse:0.227316	test-rmse:0.242238
[360]	train-rmse:0.226753	test-rmse:0.242105
[370]	train-rmse:0.226295	test-rmse:0.241969
[380]	train-rmse:0.225747	test-rmse:0.241822
[390]	train-rmse:0.225242	test-rmse:0.241719
[400]	train-rmse:0.224791	test-rmse:0.241594
[410]	train-rmse:0.224242	test-rmse:0.241473
[420]	train-rmse:0.223807	test-rmse:0.241395
[430]	train-rmse:0.223354	test-rmse:0.241284
[440]	train-rmse:0.222918	test-rmse:0.241178
[450]	train-rmse:0.222414	test-rmse:0.24107
[460]	train-rmse:0.221997	test-rmse:0.241002
[470]	train-rmse:0.221497	test-rmse:0.240912
[480]	train-rmse:0.221012	test-rmse:0.240851
[490]	train-rmse:0.220556	test-rmse:0.240751
[500]	train-rmse:0.220106	test-rmse:0.240689
[510]	train-rmse:0.219683	test-rmse:0.240635
[520]	train-rmse:0.219229	test-rmse:0.24057
[530]	train-rmse:0.218733	test-rmse:0.240506
[540]	train-rmse:0.218241	test-rmse:0.240432
[550]	train-rmse:0.217806	test-rmse:0.240351
[560]	train-rmse:0.217408	test-rmse:0.24029
[570]	train-rmse:0.216963	test-rmse:0.240221
[580]	train-rmse:0.21651	test-rmse:0.240186
[590]	train-rmse:0.21615	test-rmse:0.24016
[600]	train-rmse:0.215748	test-rmse:0.240121
[610]	train-rmse:0.215208	test-rmse:0.240063
[620]	train-rmse:0.214673	test-rmse:0.240027
[630]	train-rmse:0.214264	test-rmse:0.239981
[640]	train-rmse:0.213741	test-rmse:0.239942
[650]	train-rmse:0.213372	test-rmse:0.239886
[660]	train-rmse:0.213027	test-rmse:0.239803
[670]	train-rmse:0.212634	test-rmse:0.239775
[680]	train-rmse:0.212186	test-rmse:0.239767
[690]	train-rmse:0.211781	test-rmse:0.239745
[700]	train-rmse:0.211437	test-rmse:0.239699
[710]	train-rmse:0.210975	test-rmse:0.239706
[720]	train-rmse:0.210567	test-rmse:0.239662
[730]	train-rmse:0.210079	test-rmse:0.239633
[740]	train-rmse:0.209702	test-rmse:0.239581
[750]	train-rmse:0.209276	test-rmse:0.239538
[760]	train-rmse:0.208815	test-rmse:0.23951
[770]	train-rmse:0.208346	test-rmse:0.239458
[780]	train-rmse:0.207914	test-rmse:0.239385
[790]	train-rmse:0.207545	test-rmse:0.239335
[800]	train-rmse:0.207089	test-rmse:0.23929
[810]	train-rmse:0.206759	test-rmse:0.239285
[820]	train-rmse:0.206338	test-rmse:0.239255
[830]	train-rmse:0.205965	test-rmse:0.23924
[840]	train-rmse:0.20559	test-rmse:0.239258
[850]	train-rmse:0.205231	test-rmse:0.239197
[860]	train-rmse:0.204899	test-rmse:0.23919
[870]	train-rmse:0.20447	test-rmse:0.239143
[880]	train-rmse:0.204032	test-rmse:0.23909
[890]	train-rmse:0.20376	test-rmse:0.239065
[900]	train-rmse:0.203338	test-rmse:0.239047
[910]	train-rmse:0.202984	test-rmse:0.239055
[920]	train-rmse:0.202576	test-rmse:0.239037
[930]	train-rmse:0.202185	test-rmse:0.239028
[940]	train-rmse:0.201783	test-rmse:0.239015
[950]	train-rmse:0.201425	test-rmse:0.239005
[960]	train-rmse:0.201072	test-rmse:0.238987
[970]	train-rmse:0.200657	test-rmse:0.238969
[980]	train-rmse:0.200298	test-rmse:0.238942
[990]	train-rmse:0.199908	test-rmse:0.23894
[1000]	train-rmse:0.199505	test-rmse:0.23895
[1010]	train-rmse:0.199153	test-rmse:0.238943
[1020]	train-rmse:0.198767	test-rmse:0.238928
[1030]	train-rmse:0.198393	test-rmse:0.23894
[1040]	train-rmse:0.198007	test-rmse:0.238934
[1050]	train-rmse:0.197561	test-rmse:0.238905
[1060]	train-rmse:0.197231	test-rmse:0.238898
[1070]	train-rmse:0.196874	test-rmse:0.238874
[1080]	train-rmse:0.196534	test-rmse:0.238869
[1090]	train-rmse:0.196143	test-rmse:0.238838
[1100]	train-rmse:0.195784	test-rmse:0.23884
[1110]	train-rmse:0.195372	test-rmse:0.238827
[1120]	train-rmse:0.194991	test-rmse:0.238753
[1130]	train-rmse:0.194687	test-rmse:0.238748
[1140]	train-rmse:0.194261	test-rmse:0.238753
[1150]	train-rmse:0.193865	test-rmse:0.238758
[1160]	train-rmse:0.193477	test-rmse:0.238746
[1170]	train-rmse:0.193084	test-rmse:0.23872
[1180]	train-rmse:0.19266	test-rmse:0.23868
[1190]	train-rmse:0.192264	test-rmse:0.238703
[1200]	train-rmse:0.19194	test-rmse:0.238671
[1210]	train-rmse:0.191586	test-rmse:0.238638
[1220]	train-rmse:0.191276	test-rmse:0.238613
[1230]	train-rmse:0.190867	test-rmse:0.238626
[1240]	train-rmse:0.190535	test-rmse:0.23862
[1250]	train-rmse:0.190215	test-rmse:0.238623
[1260]	train-rmse:0.189808	test-rmse:0.238614
[1270]	train-rmse:0.189531	test-rmse:0.238644
Stopping. Best iteration:
[1222]	train-rmse:0.19118	test-rmse:0.23859

[0.236917, 0.237903, 0.241082, 0.239728, 0.23859]
0.238848

In [19]:
# Convert the per-fold xgboost regression results into a stacking DataFrame
# (train/test out-of-fold predictions) for the level-2 model.
dfs1 = run_to_stackdf(rv1)

# Persist the stack output. Use a context manager so the file handle is
# closed (and the buffer flushed) deterministically — the original
# `pickle.dump(dfs1, open(..., 'wb'))` leaked the handle until GC.
with open('modeloutput-xgb-reg-r2.pkl', 'wb') as f:
    pickle.dump(dfs1, f)

In [ ]: