In [1]:
import pandas as pd
import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split
import xgboost as xgb
from scipy import sparse
from sklearn.feature_extraction import FeatureHasher
from scipy.sparse import coo_matrix,csr_matrix,csc_matrix, hstack
from sklearn.preprocessing import normalize
from sklearn.utils import shuffle
from sklearn import linear_model
import gc
from sklearn import preprocessing

In [2]:
import matplotlib.pyplot as plt
%matplotlib inline

In [25]:
%ls


1.5_create_lag.ipynb              preprocessed_products.csv
1_predata.ipynb                   RF_model/
1_predata_whole.ipynb             ruta_for_cliente_producto.csv
3_xgb_43fea.ipynb                 stack_sub/
3_xgb.ipynb                       submission_10_new.csv
3_xgb_prediction.ipynb            submission_11_new.csv
3_xgb_test.ipynb                  submission_44fea.csv
4_keras_nn.ipynb                  submission_all_train.csv
5_random_forest.ipynb             submission_nn.csv
6_stack_model.ipynb               submission_nn_xgb
7_SGD_regressor.ipynb             train_pivot_45678_to_9_whole_zero.csv
8_svm_linearSVR.ipynb             train_pivot_56789_to_10_44fea.pickle
agencia_for_cliente_producto.csv  train_pivot_56789_to_10_44fea_zero.pickle
bst_use_all_train.model           train_pivot_56789_to_10_new.pickle
canal_for_cliente_producto.csv    train_pivot_6789_to_11_new.pickle
old_submission/                   train_pivot_xgb_time1_44fea.csv
origin/                           train_pivot_xgb_time1_44fea_zero.csv
pivot_test.pickle                 train_pivot_xgb_time1.pickle
pivot_train_with_nan.pickle       train_pivot_xgb_time2_38fea.csv

Begin training for week 11



In [18]:
predictors_target_11 = ['LR_prod', 'LR_prod_corr',
       'NombreCliente',
       'agen_cliente_for_log_de', 'agen_for_log_de',
       'agen_producto_for_log_de', 'agen_ruta_for_log_de',
       'cliente_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_for_log_de', 'corr', 'pieces',
       'producto_for_log_de', 'ruta_cliente_for_log_de', 'ruta_for_log_de',
       'ruta_producto_for_log_de', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 't_m_2_cum', 't_m_3_cum',
       't_m_4_cum', 't_m_5_cum', 't_min_2', 't_min_3', 't_min_4',
       't_min_5', 'target', 'weight', 'weight_per_piece']

In [19]:
predictors_11 = ['LR_prod', 'LR_prod_corr',
       'NombreCliente',
       'agen_cliente_for_log_de', 'agen_for_log_de',
       'agen_producto_for_log_de', 'agen_ruta_for_log_de',
       'cliente_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_for_log_de', 'corr', 'pieces',
       'producto_for_log_de', 'ruta_cliente_for_log_de', 'ruta_for_log_de',
       'ruta_producto_for_log_de', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 't_m_2_cum', 't_m_3_cum',
       't_m_4_cum', 't_m_5_cum', 't_min_2', 't_min_3', 't_min_4',
       't_min_5', 'weight', 'weight_per_piece']

In [10]:
f = lambda x: (x - x.mean()) / x.std(ddof=0)  # column-wise z-score (population std)

In [14]:
train_pivot_xgb_time2 = pd.read_csv('train_pivot_xgb_time2.csv',index_col = 0)

In [7]:
train_pivot_6789_to_11 = pd.read_pickle('train_pivot_6789_to_11_new.pickle')

In [8]:
train_pivot_xgb_time2.head()


Out[8]:
Agencia_ID Canal_ID Cliente_ID LR_prod LR_prod_corr NombreCliente Producto_ID Ruta_SAK agen_cliente_for_log_de agen_for_log_de ... t_m_3_cum t_m_4_cum t_m_5_cum t_min_2 t_min_3 t_min_4 t_min_5 target weight weight_per_piece
0 2061 2 26 2.001190 7.293554 18434 1182 7212 2.852285 3.491654 ... NaN NaN 3.688879 NaN NaN NaN 3.688879 0.000000 210.0 210.00
1 2061 2 26 1.839411 6.703932 18434 4767 7212 2.852285 3.491654 ... NaN NaN 3.761200 NaN NaN NaN 3.761200 3.761200 250.0 NaN
2 2061 2 26 1.911283 6.965878 18434 31393 7212 2.852285 3.491654 ... 8.650325 5.877736 3.044522 2.772589 2.772589 2.833213 3.044522 3.135494 640.0 NaN
3 2061 2 26 3.113374 11.347029 18434 34204 7212 2.852285 3.491654 ... 11.024839 7.218177 3.784190 3.555348 3.806662 3.433987 3.784190 3.828641 450.0 56.25
4 2061 2 26 2.031231 7.403043 18434 34206 7212 2.852285 3.491654 ... 12.963710 9.202510 4.795791 4.248495 3.761200 4.406719 4.795791 4.499810 340.0 42.50

5 rows × 38 columns


In [15]:
train_pivot_xgb_time2.columns.values


Out[15]:
array(['Agencia_ID', 'Canal_ID', 'Cliente_ID', 'LR_prod', 'LR_prod_corr',
       'NombreCliente', 'Producto_ID', 'Ruta_SAK',
       'agen_cliente_for_log_de', 'agen_for_log_de',
       'agen_producto_for_log_de', 'agen_ruta_for_log_de',
       'cliente_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_for_log_de', 'corr', 'pieces',
       'producto_for_log_de', 'ruta_cliente_for_log_de', 'ruta_for_log_de',
       'ruta_producto_for_log_de', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 't_m_2_cum', 't_m_3_cum',
       't_m_4_cum', 't_m_5_cum', 't_min_2', 't_min_3', 't_min_4',
       't_min_5', 'target', 'weight', 'weight_per_piece'], dtype=object)

In [4]:
def normalize_dataset(train_dataset, test_dataset):
    # z-score normalize train and test together so both share the same scale
    train_dataset_normalize = train_dataset[predictors_11].copy()
    train_dataset_normalize['label'] = 0    # mark training rows
    
    test_dataset_normalize = test_dataset[predictors_11].copy()
    test_dataset_normalize['label'] = 1     # mark test rows
    
    whole_dataset = pd.concat([train_dataset_normalize, test_dataset_normalize])
    whole_dataset_normalize = whole_dataset.apply(f, axis=0)   # column-wise z-score
    
    # split back into train / test using the label column of the un-normalized frame
    train_dataset_normalize = whole_dataset_normalize.loc[whole_dataset['label'] == 0]
    test_dataset_normalize = whole_dataset_normalize.loc[whole_dataset['label'] == 1]
    
    train_dataset_normalize.drop(['label'], axis=1, inplace=True)
    test_dataset_normalize.drop(['label'], axis=1, inplace=True)
    
    # keep the raw (un-normalized) target for training
    train_dataset_normalize['target'] = train_dataset['target'].copy()
    
    return train_dataset_normalize, test_dataset_normalize

In [21]:
train_dataset_normalize, test_dataset_normalize = normalize_dataset(train_pivot_xgb_time2,train_pivot_6789_to_11)


/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:14: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:15: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
/usr/local/lib/python2.7/dist-packages/ipykernel/__main__.py:17: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
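
These warnings come from calling drop(..., inplace=True) and assigning a new column on slices returned by .loc; the results are still correct here, but the warnings can be avoided by taking an explicit copy of each slice before modifying it. A minimal self-contained sketch of the pattern (demo and train_part are illustrative names, not from this notebook):

    # hedged sketch: .copy() makes the slice an independent frame,
    # so modifying it no longer triggers SettingWithCopyWarning
    demo = pd.DataFrame({'a': [1.0, 2.0, 3.0], 'label': [0, 0, 1]})
    train_part = demo.loc[demo['label'] == 0].copy()
    train_part.drop(['label'], axis=1, inplace=True)   # no warning now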

In [22]:
train_dataset_normalize.head()


Out[22]:
Semana LR_prod LR_prod_corr NombreCliente agen_cliente_for_log_de agen_for_log_de agen_producto_for_log_de agen_ruta_for_log_de cliente_for_log_de cliente_for_log_sum cliente_producto_for_log_de ... t_m_3_cum t_m_4_cum t_m_5_cum t_min_2 t_min_3 t_min_4 t_min_5 weight weight_per_piece target
0 0.440004 0.007984 -1.198863 2.893915 7.040262 4.922515 3.707101 2.951726 0.023732 2.468511 ... NaN NaN 0.841755 NaN NaN NaN 2.148223 0.041948 0.552880 0.000000
1 0.136858 0.004843 -1.198863 2.893915 7.040262 4.561988 3.707101 2.951726 0.023732 2.558317 ... NaN NaN 0.888582 NaN NaN NaN 2.230611 0.180385 NaN 3.761200
2 0.271533 0.006239 -1.198863 2.893915 7.040262 3.000979 3.707101 2.951726 0.023732 1.433925 ... 1.028141 0.851082 0.424536 1.090852 1.086921 1.149949 1.414172 1.530144 NaN 3.135494
3 2.524041 0.029578 -1.198863 2.893915 7.040262 3.917930 3.707101 2.951726 0.023732 2.414081 ... 1.752516 1.424506 0.903467 1.968861 2.245187 1.825675 2.256800 0.872569 -0.290791 3.828641
4 0.496296 0.008568 -1.198863 2.893915 7.040262 5.719378 3.707101 2.951726 0.023732 3.231175 ... 2.343992 2.273380 1.558475 2.746354 2.194265 2.919764 3.409216 0.491868 -0.366242 4.499810

5 rows × 33 columns


In [26]:
train_pivot_xgb_time2_sample = train_dataset_normalize.sample(2000000)

train_feature_11 = train_pivot_xgb_time2_sample.drop(['target'],axis = 1)
train_label_11 = train_pivot_xgb_time2_sample[['target']]

dtrain_11 = xgb.DMatrix(train_feature_11,label = train_label_11,missing=np.nan)

In [27]:
num_round = 1000

cvresult = xgb.cv(param_11, dtrain_11, num_round, nfold=5,verbose_eval = 1,show_stdv=False,
                        seed = 0, early_stopping_rounds=5)
print(cvresult.tail())


[0]	train-rmse:1.14019	test-rmse:1.14021
[1]	train-rmse:0.961458	test-rmse:0.961473
[2]	train-rmse:0.824843	test-rmse:0.824873
[3]	train-rmse:0.722328	test-rmse:0.722397
[4]	train-rmse:0.647489	test-rmse:0.647603
[5]	train-rmse:0.592862	test-rmse:0.593041
[6]	train-rmse:0.555021	test-rmse:0.555254
[7]	train-rmse:0.528662	test-rmse:0.528924
[8]	train-rmse:0.510662	test-rmse:0.510977
[9]	train-rmse:0.498551	test-rmse:0.498903
[10]	train-rmse:0.49003	test-rmse:0.490423
[11]	train-rmse:0.484163	test-rmse:0.484588
[12]	train-rmse:0.480003	test-rmse:0.480457
[13]	train-rmse:0.477039	test-rmse:0.47752
[14]	train-rmse:0.474897	test-rmse:0.475405
[15]	train-rmse:0.473305	test-rmse:0.473844
[16]	train-rmse:0.472061	test-rmse:0.472641
[17]	train-rmse:0.471142	test-rmse:0.471752
[18]	train-rmse:0.470369	test-rmse:0.471
[19]	train-rmse:0.469729	test-rmse:0.470375
[20]	train-rmse:0.469123	test-rmse:0.469783
[21]	train-rmse:0.468683	test-rmse:0.469369
[22]	train-rmse:0.468239	test-rmse:0.468944
[23]	train-rmse:0.467862	test-rmse:0.468596
[24]	train-rmse:0.467516	test-rmse:0.468284
[25]	train-rmse:0.467183	test-rmse:0.467968
[26]	train-rmse:0.46683	test-rmse:0.467642
[27]	train-rmse:0.466558	test-rmse:0.467384
[28]	train-rmse:0.466233	test-rmse:0.467087
[29]	train-rmse:0.465967	test-rmse:0.46684
[30]	train-rmse:0.46573	test-rmse:0.466622
[31]	train-rmse:0.465487	test-rmse:0.466401
[32]	train-rmse:0.465309	test-rmse:0.466247
[33]	train-rmse:0.465145	test-rmse:0.466103
[34]	train-rmse:0.464955	test-rmse:0.465931
[35]	train-rmse:0.464779	test-rmse:0.465778
[36]	train-rmse:0.464622	test-rmse:0.465647
[37]	train-rmse:0.46445	test-rmse:0.465496
[38]	train-rmse:0.464265	test-rmse:0.465335
[39]	train-rmse:0.464089	test-rmse:0.46519
[40]	train-rmse:0.463953	test-rmse:0.465087
[41]	train-rmse:0.463803	test-rmse:0.464965
[42]	train-rmse:0.463669	test-rmse:0.46485
[43]	train-rmse:0.463526	test-rmse:0.46473
[44]	train-rmse:0.463345	test-rmse:0.46458
[45]	train-rmse:0.46323	test-rmse:0.464488
[46]	train-rmse:0.463083	test-rmse:0.46436
[47]	train-rmse:0.462961	test-rmse:0.464254
[48]	train-rmse:0.462844	test-rmse:0.464164
[49]	train-rmse:0.462671	test-rmse:0.464006
[50]	train-rmse:0.462573	test-rmse:0.463937
[51]	train-rmse:0.462433	test-rmse:0.463811
[52]	train-rmse:0.462326	test-rmse:0.463717
[53]	train-rmse:0.462213	test-rmse:0.46363
[54]	train-rmse:0.462113	test-rmse:0.463553
[55]	train-rmse:0.462006	test-rmse:0.463481
[56]	train-rmse:0.461917	test-rmse:0.463415
[57]	train-rmse:0.461803	test-rmse:0.463323
[58]	train-rmse:0.461723	test-rmse:0.463264
[59]	train-rmse:0.461629	test-rmse:0.463187
[60]	train-rmse:0.461547	test-rmse:0.463137
[61]	train-rmse:0.461431	test-rmse:0.46305
[62]	train-rmse:0.461339	test-rmse:0.462979
[63]	train-rmse:0.461268	test-rmse:0.462928
[64]	train-rmse:0.461137	test-rmse:0.462813
[65]	train-rmse:0.461038	test-rmse:0.462732
[66]	train-rmse:0.460964	test-rmse:0.462681
[67]	train-rmse:0.460849	test-rmse:0.462587
[68]	train-rmse:0.460744	test-rmse:0.462503
[69]	train-rmse:0.460662	test-rmse:0.462446
[70]	train-rmse:0.460593	test-rmse:0.462395
[71]	train-rmse:0.460521	test-rmse:0.462347
[72]	train-rmse:0.460432	test-rmse:0.462281
[73]	train-rmse:0.460351	test-rmse:0.46221
[74]	train-rmse:0.46028	test-rmse:0.462161
[75]	train-rmse:0.460207	test-rmse:0.462113
[76]	train-rmse:0.460113	test-rmse:0.462038
[77]	train-rmse:0.460044	test-rmse:0.461992
[78]	train-rmse:0.459984	test-rmse:0.461954
[79]	train-rmse:0.459917	test-rmse:0.461911
[80]	train-rmse:0.45985	test-rmse:0.461865
[81]	train-rmse:0.459802	test-rmse:0.461841
[82]	train-rmse:0.459757	test-rmse:0.461811
[83]	train-rmse:0.4597	test-rmse:0.461779
[84]	train-rmse:0.459629	test-rmse:0.461723
[85]	train-rmse:0.459556	test-rmse:0.46167
[86]	train-rmse:0.45951	test-rmse:0.461643
[87]	train-rmse:0.459459	test-rmse:0.461609
[88]	train-rmse:0.459389	test-rmse:0.461553
[89]	train-rmse:0.459321	test-rmse:0.461508
[90]	train-rmse:0.45926	test-rmse:0.46146
[91]	train-rmse:0.459208	test-rmse:0.461435
[92]	train-rmse:0.459144	test-rmse:0.461394
[93]	train-rmse:0.459064	test-rmse:0.461334
[94]	train-rmse:0.458977	test-rmse:0.461258
[95]	train-rmse:0.458892	test-rmse:0.461193
[96]	train-rmse:0.45883	test-rmse:0.461158
[97]	train-rmse:0.458763	test-rmse:0.461109
[98]	train-rmse:0.458715	test-rmse:0.461084
[99]	train-rmse:0.458667	test-rmse:0.461051
[100]	train-rmse:0.45862	test-rmse:0.461025
[101]	train-rmse:0.458561	test-rmse:0.460985
[102]	train-rmse:0.458485	test-rmse:0.460929
[103]	train-rmse:0.458429	test-rmse:0.46089
[104]	train-rmse:0.458358	test-rmse:0.46085
[105]	train-rmse:0.458298	test-rmse:0.46081
[106]	train-rmse:0.458249	test-rmse:0.460781
[107]	train-rmse:0.458208	test-rmse:0.460761
[108]	train-rmse:0.458152	test-rmse:0.460726
[109]	train-rmse:0.458104	test-rmse:0.460691
[110]	train-rmse:0.458062	test-rmse:0.460668
[111]	train-rmse:0.458015	test-rmse:0.46064
[112]	train-rmse:0.45795	test-rmse:0.460591
[113]	train-rmse:0.457892	test-rmse:0.460552
[114]	train-rmse:0.457838	test-rmse:0.460513
[115]	train-rmse:0.457792	test-rmse:0.460485
[116]	train-rmse:0.457736	test-rmse:0.460451
[117]	train-rmse:0.457677	test-rmse:0.460414
[118]	train-rmse:0.457629	test-rmse:0.460385
[119]	train-rmse:0.457591	test-rmse:0.460364
[120]	train-rmse:0.457548	test-rmse:0.460338
[121]	train-rmse:0.457504	test-rmse:0.460321
[122]	train-rmse:0.457451	test-rmse:0.46029
[123]	train-rmse:0.4574	test-rmse:0.460258
[124]	train-rmse:0.457357	test-rmse:0.460242
[125]	train-rmse:0.457313	test-rmse:0.460215
[126]	train-rmse:0.457275	test-rmse:0.460195
[127]	train-rmse:0.457239	test-rmse:0.460181
[128]	train-rmse:0.457175	test-rmse:0.460133
[129]	train-rmse:0.457127	test-rmse:0.460101
[130]	train-rmse:0.457072	test-rmse:0.460069
[131]	train-rmse:0.45704	test-rmse:0.460059
[132]	train-rmse:0.456989	test-rmse:0.460023
[133]	train-rmse:0.456961	test-rmse:0.46001
[134]	train-rmse:0.456914	test-rmse:0.459983
[135]	train-rmse:0.456855	test-rmse:0.459941
[136]	train-rmse:0.4568	test-rmse:0.459908
[137]	train-rmse:0.456758	test-rmse:0.459893
[138]	train-rmse:0.456722	test-rmse:0.459872
[139]	train-rmse:0.456691	test-rmse:0.459851
[140]	train-rmse:0.456637	test-rmse:0.459813
[141]	train-rmse:0.456591	test-rmse:0.459779
[142]	train-rmse:0.456547	test-rmse:0.459753
[143]	train-rmse:0.456497	test-rmse:0.459717
[144]	train-rmse:0.456462	test-rmse:0.4597
[145]	train-rmse:0.456428	test-rmse:0.459689
[146]	train-rmse:0.45639	test-rmse:0.459668
[147]	train-rmse:0.456343	test-rmse:0.459636
[148]	train-rmse:0.456296	test-rmse:0.459606
[149]	train-rmse:0.456264	test-rmse:0.459592
[150]	train-rmse:0.456216	test-rmse:0.459563
[151]	train-rmse:0.45618	test-rmse:0.459541
[152]	train-rmse:0.456143	test-rmse:0.459521
[153]	train-rmse:0.456107	test-rmse:0.459505
[154]	train-rmse:0.45607	test-rmse:0.45948
[155]	train-rmse:0.456035	test-rmse:0.459458
[156]	train-rmse:0.455993	test-rmse:0.459439
[157]	train-rmse:0.455953	test-rmse:0.459421
[158]	train-rmse:0.455928	test-rmse:0.459423
[159]	train-rmse:0.455896	test-rmse:0.459411
[160]	train-rmse:0.455849	test-rmse:0.459382
[161]	train-rmse:0.455798	test-rmse:0.459353
[162]	train-rmse:0.455758	test-rmse:0.459332
[163]	train-rmse:0.45572	test-rmse:0.459312
[164]	train-rmse:0.455671	test-rmse:0.459274
[165]	train-rmse:0.455643	test-rmse:0.459267
[166]	train-rmse:0.455599	test-rmse:0.459238
[167]	train-rmse:0.455578	test-rmse:0.459229
[168]	train-rmse:0.455557	test-rmse:0.459222
[169]	train-rmse:0.455519	test-rmse:0.459205
[170]	train-rmse:0.455479	test-rmse:0.459182
[171]	train-rmse:0.455424	test-rmse:0.459142
[172]	train-rmse:0.455386	test-rmse:0.459117
[173]	train-rmse:0.455333	test-rmse:0.459082
[174]	train-rmse:0.455288	test-rmse:0.459061
[175]	train-rmse:0.455247	test-rmse:0.459037
[176]	train-rmse:0.455217	test-rmse:0.459022
[177]	train-rmse:0.455182	test-rmse:0.459006
[178]	train-rmse:0.45515	test-rmse:0.458995
[179]	train-rmse:0.455126	test-rmse:0.458987
[180]	train-rmse:0.455077	test-rmse:0.458954
[181]	train-rmse:0.455041	test-rmse:0.45894
[182]	train-rmse:0.455009	test-rmse:0.458925
[183]	train-rmse:0.454971	test-rmse:0.458911
[184]	train-rmse:0.454937	test-rmse:0.458897
[185]	train-rmse:0.454906	test-rmse:0.458882
[186]	train-rmse:0.454867	test-rmse:0.458865
[187]	train-rmse:0.454838	test-rmse:0.458856
[188]	train-rmse:0.454808	test-rmse:0.458839
[189]	train-rmse:0.454758	test-rmse:0.458799
[190]	train-rmse:0.454732	test-rmse:0.458785
[191]	train-rmse:0.454707	test-rmse:0.458781
[192]	train-rmse:0.454676	test-rmse:0.458763
[193]	train-rmse:0.454644	test-rmse:0.458749
[194]	train-rmse:0.454612	test-rmse:0.458727
[195]	train-rmse:0.454586	test-rmse:0.458717
[196]	train-rmse:0.454553	test-rmse:0.458698
[197]	train-rmse:0.454522	test-rmse:0.458686
[198]	train-rmse:0.454498	test-rmse:0.45868
[199]	train-rmse:0.454471	test-rmse:0.45867
[200]	train-rmse:0.454441	test-rmse:0.458658
[201]	train-rmse:0.454419	test-rmse:0.458648
[202]	train-rmse:0.454394	test-rmse:0.458632
[203]	train-rmse:0.454364	test-rmse:0.458618
[204]	train-rmse:0.454324	test-rmse:0.458591
[205]	train-rmse:0.454293	test-rmse:0.45858
[206]	train-rmse:0.454252	test-rmse:0.45856
[207]	train-rmse:0.454214	test-rmse:0.458538
[208]	train-rmse:0.454184	test-rmse:0.458523
[209]	train-rmse:0.454149	test-rmse:0.458506
[210]	train-rmse:0.45412	test-rmse:0.458498
[211]	train-rmse:0.454096	test-rmse:0.458492
[212]	train-rmse:0.454066	test-rmse:0.458483
[213]	train-rmse:0.454031	test-rmse:0.458469
[214]	train-rmse:0.454001	test-rmse:0.458454
[215]	train-rmse:0.453978	test-rmse:0.458447
[216]	train-rmse:0.453957	test-rmse:0.458442
[217]	train-rmse:0.45393	test-rmse:0.45844
[218]	train-rmse:0.453906	test-rmse:0.458437
[219]	train-rmse:0.453883	test-rmse:0.458433
[220]	train-rmse:0.453854	test-rmse:0.458424
[221]	train-rmse:0.453833	test-rmse:0.458417
[222]	train-rmse:0.453804	test-rmse:0.458404
[223]	train-rmse:0.453771	test-rmse:0.458387
[224]	train-rmse:0.453738	test-rmse:0.458379
[225]	train-rmse:0.453705	test-rmse:0.458364
[226]	train-rmse:0.453666	test-rmse:0.45834
[227]	train-rmse:0.453637	test-rmse:0.458324
[228]	train-rmse:0.453604	test-rmse:0.458308
[229]	train-rmse:0.453568	test-rmse:0.458286
[230]	train-rmse:0.453544	test-rmse:0.458278
[231]	train-rmse:0.453517	test-rmse:0.458261
[232]	train-rmse:0.453487	test-rmse:0.458256
[233]	train-rmse:0.453461	test-rmse:0.458247
[234]	train-rmse:0.453427	test-rmse:0.458229
[235]	train-rmse:0.453395	test-rmse:0.458214
[236]	train-rmse:0.453372	test-rmse:0.458208
[237]	train-rmse:0.453348	test-rmse:0.458201
[238]	train-rmse:0.453319	test-rmse:0.458192
[239]	train-rmse:0.453297	test-rmse:0.458187
[240]	train-rmse:0.453274	test-rmse:0.458178
[241]	train-rmse:0.453253	test-rmse:0.45817
[242]	train-rmse:0.45322	test-rmse:0.458155
[243]	train-rmse:0.453195	test-rmse:0.458147
[244]	train-rmse:0.453173	test-rmse:0.458142
[245]	train-rmse:0.453138	test-rmse:0.458128
[246]	train-rmse:0.453114	test-rmse:0.45812
[247]	train-rmse:0.453088	test-rmse:0.45811
[248]	train-rmse:0.453068	test-rmse:0.458109
[249]	train-rmse:0.453038	test-rmse:0.458096
[250]	train-rmse:0.45301	test-rmse:0.458085
[251]	train-rmse:0.452982	test-rmse:0.458079
[252]	train-rmse:0.452954	test-rmse:0.458069
[253]	train-rmse:0.452933	test-rmse:0.458065
[254]	train-rmse:0.452898	test-rmse:0.458045
[255]	train-rmse:0.452879	test-rmse:0.45804
[256]	train-rmse:0.452847	test-rmse:0.458028
[257]	train-rmse:0.452822	test-rmse:0.458019
[258]	train-rmse:0.452791	test-rmse:0.458005
[259]	train-rmse:0.452766	test-rmse:0.457988
[260]	train-rmse:0.452739	test-rmse:0.457979
[261]	train-rmse:0.452722	test-rmse:0.457977
[262]	train-rmse:0.452691	test-rmse:0.457961
[263]	train-rmse:0.452669	test-rmse:0.457954
[264]	train-rmse:0.452642	test-rmse:0.457943
[265]	train-rmse:0.452607	test-rmse:0.45792
[266]	train-rmse:0.452585	test-rmse:0.457913
[267]	train-rmse:0.452562	test-rmse:0.457908
[268]	train-rmse:0.452537	test-rmse:0.457899
[269]	train-rmse:0.452509	test-rmse:0.457888
[270]	train-rmse:0.452482	test-rmse:0.45788
[271]	train-rmse:0.452453	test-rmse:0.457868
[272]	train-rmse:0.452423	test-rmse:0.45786
[273]	train-rmse:0.452389	test-rmse:0.457846
[274]	train-rmse:0.45236	test-rmse:0.457834
[275]	train-rmse:0.452339	test-rmse:0.457828
[276]	train-rmse:0.452316	test-rmse:0.457822
[277]	train-rmse:0.452296	test-rmse:0.457817
[278]	train-rmse:0.452268	test-rmse:0.457805
[279]	train-rmse:0.452244	test-rmse:0.457801
[280]	train-rmse:0.452222	test-rmse:0.45779
[281]	train-rmse:0.452196	test-rmse:0.457782
[282]	train-rmse:0.452174	test-rmse:0.457773
[283]	train-rmse:0.452149	test-rmse:0.457759
[284]	train-rmse:0.452122	test-rmse:0.457752
[285]	train-rmse:0.452101	test-rmse:0.457744
[286]	train-rmse:0.452083	test-rmse:0.457738
[287]	train-rmse:0.452059	test-rmse:0.457734
[288]	train-rmse:0.452025	test-rmse:0.457713
[289]	train-rmse:0.451997	test-rmse:0.457701
[290]	train-rmse:0.451978	test-rmse:0.457696
[291]	train-rmse:0.451958	test-rmse:0.457692
[292]	train-rmse:0.45194	test-rmse:0.457688
[293]	train-rmse:0.451917	test-rmse:0.457677
[294]	train-rmse:0.451895	test-rmse:0.457669
[295]	train-rmse:0.451867	test-rmse:0.457656
[296]	train-rmse:0.451842	test-rmse:0.457648
[297]	train-rmse:0.451822	test-rmse:0.457639
[298]	train-rmse:0.451796	test-rmse:0.457633
[299]	train-rmse:0.45177	test-rmse:0.457627
[300]	train-rmse:0.451738	test-rmse:0.457606
[301]	train-rmse:0.451706	test-rmse:0.457592
[302]	train-rmse:0.451689	test-rmse:0.457587
[303]	train-rmse:0.451668	test-rmse:0.457585
[304]	train-rmse:0.451628	test-rmse:0.457558
[305]	train-rmse:0.451599	test-rmse:0.457544
[306]	train-rmse:0.451579	test-rmse:0.457537
[307]	train-rmse:0.451549	test-rmse:0.457524
[308]	train-rmse:0.451526	test-rmse:0.457518
[309]	train-rmse:0.451494	test-rmse:0.457504
[310]	train-rmse:0.451475	test-rmse:0.457502
[311]	train-rmse:0.451454	test-rmse:0.457501
[312]	train-rmse:0.451425	test-rmse:0.457489
[313]	train-rmse:0.451399	test-rmse:0.457478
[314]	train-rmse:0.451369	test-rmse:0.457469
[315]	train-rmse:0.451351	test-rmse:0.457465
[316]	train-rmse:0.45133	test-rmse:0.457461
[317]	train-rmse:0.451299	test-rmse:0.457448
[318]	train-rmse:0.451279	test-rmse:0.457441
[319]	train-rmse:0.45126	test-rmse:0.457432
[320]	train-rmse:0.451242	test-rmse:0.457431
[321]	train-rmse:0.451222	test-rmse:0.457429
[322]	train-rmse:0.451201	test-rmse:0.457426
[323]	train-rmse:0.451184	test-rmse:0.45742
[324]	train-rmse:0.451152	test-rmse:0.457406
[325]	train-rmse:0.451125	test-rmse:0.457398
[326]	train-rmse:0.451092	test-rmse:0.457376
[327]	train-rmse:0.451059	test-rmse:0.457362
[328]	train-rmse:0.45104	test-rmse:0.457359
[329]	train-rmse:0.451024	test-rmse:0.457355
[330]	train-rmse:0.451007	test-rmse:0.457355
[331]	train-rmse:0.450975	test-rmse:0.457341
[332]	train-rmse:0.450949	test-rmse:0.457331
[333]	train-rmse:0.450932	test-rmse:0.457331
[334]	train-rmse:0.450912	test-rmse:0.457331
[335]	train-rmse:0.450881	test-rmse:0.457317
[336]	train-rmse:0.450859	test-rmse:0.45732
[337]	train-rmse:0.450838	test-rmse:0.457311
[338]	train-rmse:0.450819	test-rmse:0.457301
[339]	train-rmse:0.450802	test-rmse:0.457296
[340]	train-rmse:0.45078	test-rmse:0.457294
[341]	train-rmse:0.450761	test-rmse:0.457294
[342]	train-rmse:0.450742	test-rmse:0.457292
[343]	train-rmse:0.450721	test-rmse:0.457293
[344]	train-rmse:0.450694	test-rmse:0.457288
[345]	train-rmse:0.450671	test-rmse:0.457282
[346]	train-rmse:0.450648	test-rmse:0.457273
[347]	train-rmse:0.450629	test-rmse:0.457266
[348]	train-rmse:0.450604	test-rmse:0.457255
[349]	train-rmse:0.450572	test-rmse:0.45724
[350]	train-rmse:0.450548	test-rmse:0.457233
[351]	train-rmse:0.450533	test-rmse:0.457233
[352]	train-rmse:0.450518	test-rmse:0.457229
[353]	train-rmse:0.450495	test-rmse:0.457227
[354]	train-rmse:0.450471	test-rmse:0.457216
[355]	train-rmse:0.450453	test-rmse:0.457215
[356]	train-rmse:0.450432	test-rmse:0.45721
[357]	train-rmse:0.450418	test-rmse:0.457208
[358]	train-rmse:0.450395	test-rmse:0.457203
[359]	train-rmse:0.45038	test-rmse:0.457203
[360]	train-rmse:0.45036	test-rmse:0.457198
[361]	train-rmse:0.450338	test-rmse:0.457192
[362]	train-rmse:0.450324	test-rmse:0.45719
[363]	train-rmse:0.450307	test-rmse:0.457186
[364]	train-rmse:0.450278	test-rmse:0.457174
[365]	train-rmse:0.450263	test-rmse:0.457174
[366]	train-rmse:0.450242	test-rmse:0.457167
[367]	train-rmse:0.450219	test-rmse:0.457159
[368]	train-rmse:0.450198	test-rmse:0.457153
[369]	train-rmse:0.450185	test-rmse:0.457154
[370]	train-rmse:0.450171	test-rmse:0.457156
[371]	train-rmse:0.450149	test-rmse:0.457148
[372]	train-rmse:0.450132	test-rmse:0.457147
[373]	train-rmse:0.450115	test-rmse:0.45714
[374]	train-rmse:0.450093	test-rmse:0.457139
[375]	train-rmse:0.45007	test-rmse:0.45713
[376]	train-rmse:0.450047	test-rmse:0.457126
[377]	train-rmse:0.450026	test-rmse:0.45712
[378]	train-rmse:0.450009	test-rmse:0.457117
[379]	train-rmse:0.449995	test-rmse:0.457116
[380]	train-rmse:0.449971	test-rmse:0.457112
[381]	train-rmse:0.449951	test-rmse:0.457108
[382]	train-rmse:0.449929	test-rmse:0.4571
[383]	train-rmse:0.449909	test-rmse:0.457098
[384]	train-rmse:0.44989	test-rmse:0.457091
[385]	train-rmse:0.449869	test-rmse:0.457089
[386]	train-rmse:0.449847	test-rmse:0.457085
[387]	train-rmse:0.449825	test-rmse:0.457078
[388]	train-rmse:0.449803	test-rmse:0.457071
[389]	train-rmse:0.449785	test-rmse:0.457065
[390]	train-rmse:0.449762	test-rmse:0.457064
[391]	train-rmse:0.44974	test-rmse:0.45706
[392]	train-rmse:0.449724	test-rmse:0.457057
[393]	train-rmse:0.449703	test-rmse:0.457052
[394]	train-rmse:0.449669	test-rmse:0.457036
[395]	train-rmse:0.449647	test-rmse:0.457033
[396]	train-rmse:0.44963	test-rmse:0.457029
[397]	train-rmse:0.44961	test-rmse:0.45702
[398]	train-rmse:0.449595	test-rmse:0.457021
[399]	train-rmse:0.44958	test-rmse:0.45702
[400]	train-rmse:0.44956	test-rmse:0.457016
[401]	train-rmse:0.449546	test-rmse:0.457014
[402]	train-rmse:0.449526	test-rmse:0.457008
[403]	train-rmse:0.449508	test-rmse:0.457004
[404]	train-rmse:0.449488	test-rmse:0.457006
[405]	train-rmse:0.449471	test-rmse:0.457006
[406]	train-rmse:0.449454	test-rmse:0.457002
[407]	train-rmse:0.449436	test-rmse:0.456998
[408]	train-rmse:0.449418	test-rmse:0.456996
[409]	train-rmse:0.449395	test-rmse:0.456988
[410]	train-rmse:0.449375	test-rmse:0.456983
[411]	train-rmse:0.449358	test-rmse:0.456984
[412]	train-rmse:0.449327	test-rmse:0.456967
[413]	train-rmse:0.449308	test-rmse:0.456963
[414]	train-rmse:0.449288	test-rmse:0.456956
[415]	train-rmse:0.449272	test-rmse:0.456953
[416]	train-rmse:0.44925	test-rmse:0.456944
[417]	train-rmse:0.449239	test-rmse:0.456944
[418]	train-rmse:0.449218	test-rmse:0.456938
[419]	train-rmse:0.449199	test-rmse:0.456932
[420]	train-rmse:0.449184	test-rmse:0.456927
[421]	train-rmse:0.449165	test-rmse:0.456921
[422]	train-rmse:0.44915	test-rmse:0.456918
[423]	train-rmse:0.449134	test-rmse:0.456916
[424]	train-rmse:0.449113	test-rmse:0.456904
[425]	train-rmse:0.44909	test-rmse:0.456897
[426]	train-rmse:0.449071	test-rmse:0.456891
[427]	train-rmse:0.449052	test-rmse:0.456888
[428]	train-rmse:0.449039	test-rmse:0.45689
[429]	train-rmse:0.449017	test-rmse:0.456883
[430]	train-rmse:0.449	test-rmse:0.456877
[431]	train-rmse:0.448982	test-rmse:0.456871
[432]	train-rmse:0.448959	test-rmse:0.456863
[433]	train-rmse:0.448943	test-rmse:0.456859
[434]	train-rmse:0.448927	test-rmse:0.456857
[435]	train-rmse:0.448908	test-rmse:0.456855
[436]	train-rmse:0.44889	test-rmse:0.456854
[437]	train-rmse:0.448871	test-rmse:0.45685
[438]	train-rmse:0.448851	test-rmse:0.456844
[439]	train-rmse:0.448827	test-rmse:0.456838
[440]	train-rmse:0.448804	test-rmse:0.456829
[441]	train-rmse:0.44879	test-rmse:0.456828
[442]	train-rmse:0.448771	test-rmse:0.456826
[443]	train-rmse:0.448749	test-rmse:0.456815
[444]	train-rmse:0.448731	test-rmse:0.456818
[445]	train-rmse:0.448716	test-rmse:0.456814
[446]	train-rmse:0.448701	test-rmse:0.456811
[447]	train-rmse:0.448682	test-rmse:0.456807
[448]	train-rmse:0.448666	test-rmse:0.456805
[449]	train-rmse:0.448649	test-rmse:0.456802
[450]	train-rmse:0.448632	test-rmse:0.456801
[451]	train-rmse:0.448611	test-rmse:0.456795
[452]	train-rmse:0.448593	test-rmse:0.456793
[453]	train-rmse:0.448578	test-rmse:0.456789
[454]	train-rmse:0.448562	test-rmse:0.456788
[455]	train-rmse:0.448545	test-rmse:0.456783
[456]	train-rmse:0.448524	test-rmse:0.456777
[457]	train-rmse:0.448512	test-rmse:0.456773
[458]	train-rmse:0.448495	test-rmse:0.456773
[459]	train-rmse:0.448478	test-rmse:0.45677
[460]	train-rmse:0.44845	test-rmse:0.456756
[461]	train-rmse:0.448433	test-rmse:0.456753
[462]	train-rmse:0.448415	test-rmse:0.456752
[463]	train-rmse:0.448397	test-rmse:0.456748
[464]	train-rmse:0.448383	test-rmse:0.456745
[465]	train-rmse:0.448363	test-rmse:0.45674
[466]	train-rmse:0.448339	test-rmse:0.456728
[467]	train-rmse:0.448317	test-rmse:0.456722
[468]	train-rmse:0.448299	test-rmse:0.456723
[469]	train-rmse:0.448281	test-rmse:0.456715
[470]	train-rmse:0.448263	test-rmse:0.456718
[471]	train-rmse:0.448242	test-rmse:0.456717
[472]	train-rmse:0.448222	test-rmse:0.456711
[473]	train-rmse:0.448204	test-rmse:0.456709
[474]	train-rmse:0.448185	test-rmse:0.456704
[475]	train-rmse:0.448164	test-rmse:0.456699
[476]	train-rmse:0.448147	test-rmse:0.456699
[477]	train-rmse:0.448128	test-rmse:0.456691
[478]	train-rmse:0.448102	test-rmse:0.456677
[479]	train-rmse:0.448086	test-rmse:0.456676
[480]	train-rmse:0.448074	test-rmse:0.456675
[481]	train-rmse:0.448059	test-rmse:0.456676
[482]	train-rmse:0.448042	test-rmse:0.456676
[483]	train-rmse:0.448024	test-rmse:0.45667
[484]	train-rmse:0.448	test-rmse:0.456662
[485]	train-rmse:0.44798	test-rmse:0.456656
[486]	train-rmse:0.447958	test-rmse:0.456655
[487]	train-rmse:0.447942	test-rmse:0.456655
[488]	train-rmse:0.44792	test-rmse:0.456646
[489]	train-rmse:0.447898	test-rmse:0.456638
[490]	train-rmse:0.447879	test-rmse:0.456633
[491]	train-rmse:0.44786	test-rmse:0.456633
[492]	train-rmse:0.447843	test-rmse:0.456626
[493]	train-rmse:0.447823	test-rmse:0.456621
[494]	train-rmse:0.447803	test-rmse:0.456618
[495]	train-rmse:0.447784	test-rmse:0.456614
[496]	train-rmse:0.447762	test-rmse:0.456607
[497]	train-rmse:0.447743	test-rmse:0.456604
[498]	train-rmse:0.447727	test-rmse:0.456602
[499]	train-rmse:0.447715	test-rmse:0.456601
[500]	train-rmse:0.4477	test-rmse:0.4566
[501]	train-rmse:0.447679	test-rmse:0.456594
[502]	train-rmse:0.447659	test-rmse:0.456587
[503]	train-rmse:0.447638	test-rmse:0.456583
[504]	train-rmse:0.447619	test-rmse:0.456581
[505]	train-rmse:0.447602	test-rmse:0.456581
[506]	train-rmse:0.447585	test-rmse:0.456579
[507]	train-rmse:0.447569	test-rmse:0.456579
[508]	train-rmse:0.447555	test-rmse:0.456575
[509]	train-rmse:0.447539	test-rmse:0.456571
[510]	train-rmse:0.447523	test-rmse:0.456566
[511]	train-rmse:0.447508	test-rmse:0.456564
[512]	train-rmse:0.447489	test-rmse:0.456559
[513]	train-rmse:0.447473	test-rmse:0.456559
[514]	train-rmse:0.447455	test-rmse:0.456555
[515]	train-rmse:0.447437	test-rmse:0.456552
[516]	train-rmse:0.447422	test-rmse:0.45655
[517]	train-rmse:0.447405	test-rmse:0.45655
[518]	train-rmse:0.447389	test-rmse:0.456549
[519]	train-rmse:0.447374	test-rmse:0.456546
[520]	train-rmse:0.447357	test-rmse:0.45654
[521]	train-rmse:0.447342	test-rmse:0.456538
[522]	train-rmse:0.447329	test-rmse:0.456535
[523]	train-rmse:0.44731	test-rmse:0.456529
[524]	train-rmse:0.447293	test-rmse:0.456528
[525]	train-rmse:0.447278	test-rmse:0.456527
[526]	train-rmse:0.447257	test-rmse:0.456523
[527]	train-rmse:0.447241	test-rmse:0.456522
[528]	train-rmse:0.447226	test-rmse:0.456519
[529]	train-rmse:0.447208	test-rmse:0.456513
[530]	train-rmse:0.447191	test-rmse:0.456511
[531]	train-rmse:0.447175	test-rmse:0.45651
[532]	train-rmse:0.447157	test-rmse:0.456505
[533]	train-rmse:0.447134	test-rmse:0.456498
[534]	train-rmse:0.447115	test-rmse:0.456495
[535]	train-rmse:0.447096	test-rmse:0.456494
[536]	train-rmse:0.447084	test-rmse:0.456493
[537]	train-rmse:0.447066	test-rmse:0.456491
[538]	train-rmse:0.447051	test-rmse:0.456491
[539]	train-rmse:0.447028	test-rmse:0.456484
[540]	train-rmse:0.447012	test-rmse:0.456482
[541]	train-rmse:0.446996	test-rmse:0.456481
[542]	train-rmse:0.446977	test-rmse:0.456479
[543]	train-rmse:0.44696	test-rmse:0.456476
[544]	train-rmse:0.446945	test-rmse:0.456477
[545]	train-rmse:0.446925	test-rmse:0.456472
[546]	train-rmse:0.446903	test-rmse:0.456464
[547]	train-rmse:0.446884	test-rmse:0.456464
[548]	train-rmse:0.446865	test-rmse:0.456464
[549]	train-rmse:0.44685	test-rmse:0.456462
[550]	train-rmse:0.446826	test-rmse:0.456447
[551]	train-rmse:0.446809	test-rmse:0.456444
[552]	train-rmse:0.44679	test-rmse:0.456443
[553]	train-rmse:0.446772	test-rmse:0.456443
[554]	train-rmse:0.446761	test-rmse:0.456443
[555]	train-rmse:0.446743	test-rmse:0.45644
[556]	train-rmse:0.446725	test-rmse:0.456437
[557]	train-rmse:0.446707	test-rmse:0.456438
[558]	train-rmse:0.44669	test-rmse:0.456435
[559]	train-rmse:0.446669	test-rmse:0.456423
[560]	train-rmse:0.446651	test-rmse:0.456424
[561]	train-rmse:0.446636	test-rmse:0.456423
[562]	train-rmse:0.446622	test-rmse:0.456422
[563]	train-rmse:0.446608	test-rmse:0.456422
[564]	train-rmse:0.446596	test-rmse:0.456422
[565]	train-rmse:0.44658	test-rmse:0.45642
[566]	train-rmse:0.446561	test-rmse:0.456417
[567]	train-rmse:0.446544	test-rmse:0.456418
[568]	train-rmse:0.446529	test-rmse:0.45642
[569]	train-rmse:0.446512	test-rmse:0.45642
[570]	train-rmse:0.446497	test-rmse:0.456418
     test-rmse-mean  test-rmse-std  train-rmse-mean  train-rmse-std
562        0.456422       0.000445         0.446622        0.000113
563        0.456422       0.000447         0.446608        0.000115
564        0.456422       0.000447         0.446596        0.000116
565        0.456420       0.000451         0.446580        0.000116
566        0.456417       0.000442         0.446561        0.000120
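
The tail above shows the mean test RMSE flattening out near round 566, where early stopping halted. A small hedged sketch for reading the best round count out of cvresult, rather than eyeballing the log:

    # hedged sketch: take the boosting round with the lowest mean test RMSE
    best_round = int(cvresult['test-rmse-mean'].idxmin())
    print best_round   # about 566 for the run above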

In [24]:
param_11 = {'booster':'gbtree',
         'nthread': 10,
         'max_depth':5, 
         'eta':0.2,
         'silent':1,
         'subsample':0.7, 
         'objective':'reg:linear',
         'eval_metric':'rmse',
         'colsample_bytree':0.7}

In [28]:
num_round = 566

dtest_11 = xgb.DMatrix(test_dataset_normalize[predictors_11], missing=np.nan)
submission_11 = train_pivot_6789_to_11[['id']].copy()
j =0 
for j in range(20):
    
    train_pivot_xgb_time2_sample = train_dataset_normalize[predictors_target_11].sample(2000000)
    train_feature_11 = train_pivot_xgb_time2_sample.drop(['target'],axis = 1)
    train_label_11 = train_pivot_xgb_time2_sample[['target']]

    dtrain_11 = xgb.DMatrix(train_feature_11,label = train_label_11,missing= np.nan)
    
    bst_11 = xgb.train(param_11, dtrain_11, num_round)
    print str(j) + 'training finished!'
    submission_11['predict_' + str(j)] = bst_11.predict(dtest_11)

print 'finished'


0training finished!
1training finished!
2training finished!
3training finished!
4training finished!
5training finished!
6training finished!
7training finished!
8training finished!
9training finished!
10training finished!
11training finished!
12training finished!
13training finished!
14training finished!
15training finished!
16training finished!
17training finished!
18training finished!
19training finished!
finished
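
Each of the 20 boosters above is trained on a different 2M-row sample, so the predict_0 … predict_19 columns form a simple bagged ensemble. How they are combined is not shown in this notebook; a minimal hedged sketch (the column names predict_mean and demand are illustrative, and it assumes the target is log1p-transformed demand) would be:

    # hedged sketch: average the 20 bagged predictions, then undo the assumed
    # log1p transform of the target to get back to unit demand
    pred_cols = ['predict_' + str(j) for j in range(20)]
    submission_11['predict_mean'] = submission_11[pred_cols].mean(axis=1)
    submission_11['demand'] = np.expm1(submission_11['predict_mean'])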

In [12]:
# make a single prediction with one trained booster and plot feature importance
dtest_11 = xgb.DMatrix(train_pivot_6789_to_11[predictors_11], missing=np.nan)
submission_11 = train_pivot_6789_to_11[['id']].copy()
submission_11['predict'] = bst_11.predict(dtest_11)
xgb.plot_importance(bst_11)

In [29]:
submission_11.to_csv('submission_11_new.csv')

In [11]:
submission_11 = pd.read_csv('submission_11_new.csv',index_col =0)

In [12]:
submission_11.columns.values


Out[12]:
array(['id', 'predict_0', 'predict_1', 'predict_2', 'predict_3',
       'predict_4', 'predict_5', 'predict_6', 'predict_7', 'predict_8',
       'predict_9', 'predict_10', 'predict_11', 'predict_12', 'predict_13',
       'predict_14', 'predict_15', 'predict_16', 'predict_17',
       'predict_18', 'predict_19'], dtype=object)

For week 10



In [2]:
%ls


1.5_create_lag.ipynb              preprocessed_products.csv
1_predata.ipynb                   RF_model/
1_predata_whole.ipynb             ruta_for_cliente_producto.csv
3_xgb_43fea.ipynb                 stack_sub/
3_xgb.ipynb                       submission_10_new.csv
3_xgb_prediction.ipynb            submission_11_new.csv
3_xgb_test.ipynb                  submission_44fea.csv
4_keras_nn.ipynb                  submission_all_train.csv
5_random_forest.ipynb             submission_nn.csv
6_stack_model.ipynb               submission_nn_xgb
7_SGD_regressor.ipynb             train_pivot_56789_to_10_44fea.pickle
8_svm_linearSVR.ipynb             train_pivot_56789_to_10_44fea_zero.pickle
agencia_for_cliente_producto.csv  train_pivot_56789_to_10_new.pickle
bst_use_all_train.model           train_pivot_6789_to_11_new.pickle
canal_for_cliente_producto.csv    train_pivot_xgb_time1_44fea.csv
old_submission/                   train_pivot_xgb_time1_44fea_zero.csv
origin/                           train_pivot_xgb_time1.pickle
pivot_test.pickle                 train_pivot_xgb_time2_38fea.csv
pivot_train_with_nan.pickle

In [4]:
predictors_target_10 = ['ruta_freq', 'clien_freq', 'agen_freq', 'prod_freq',
       'agen_for_log_de', 'ruta_for_log_de', 'cliente_for_log_de',
       'producto_for_log_de', 'agen_ruta_for_log_de',
       'agen_cliente_for_log_de', 'agen_producto_for_log_de',
       'ruta_cliente_for_log_de', 'ruta_producto_for_log_de',
       'cliente_producto_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_agen_for_log_sum', 'corr', 't_min_1', 't_min_2',
       't_min_3', 't_min_4', 't_min_5', 't1_min_t2', 't1_min_t3',
       't1_min_t4', 't1_min_t5', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 'LR_prod', 'LR_prod_corr',
       'target', 't_m_5_cum', 't_m_4_cum', 't_m_3_cum', 't_m_2_cum',
       't_m_1_cum', 'NombreCliente', 'weight', 'weight_per_piece', 'pieces']

In [5]:
predictors_10 = ['ruta_freq', 'clien_freq', 'agen_freq', 'prod_freq',
       'agen_for_log_de', 'ruta_for_log_de', 'cliente_for_log_de',
       'producto_for_log_de', 'agen_ruta_for_log_de',
       'agen_cliente_for_log_de', 'agen_producto_for_log_de',
       'ruta_cliente_for_log_de', 'ruta_producto_for_log_de',
       'cliente_producto_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_agen_for_log_sum', 'corr', 't_min_1', 't_min_2',
       't_min_3', 't_min_4', 't_min_5', 't1_min_t2', 't1_min_t3',
       't1_min_t4', 't1_min_t5', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 'LR_prod', 'LR_prod_corr',
       't_m_5_cum', 't_m_4_cum', 't_m_3_cum', 't_m_2_cum',
       't_m_1_cum', 'NombreCliente', 'weight', 'weight_per_piece', 'pieces']

In [28]:
def normalize_dataset_10(train_dataset, test_dataset):
    # same joint z-score normalization as normalize_dataset above, but for the
    # week-10 feature set; also carries the test id column through
    train_dataset_normalize = train_dataset[predictors_10].copy()
    train_dataset_normalize['label'] = 0    
    
    test_dataset_normalize = test_dataset[predictors_10].copy()
    test_dataset_normalize['label'] = 1
    
    whole_dataset = pd.concat([train_dataset_normalize,test_dataset_normalize],copy = False)
    whole_dataset_normalize = whole_dataset.apply(f,axis = 0)
    
    train_dataset_normalize = whole_dataset_normalize.loc[whole_dataset['label'] == 0]
    test_dataset_normalize = whole_dataset_normalize.loc[whole_dataset['label']==1]
    
    train_dataset_normalize.drop(['label'],axis = 1,inplace = True)
    test_dataset_normalize.drop(['label'],axis =1,inplace = True)
    
    train_dataset_normalize['target'] = train_dataset['target'].copy() 
    test_dataset_normalize['id'] = test_dataset['id'].copy() 

    
#     target = train_dataset['target']
    return train_dataset_normalize,test_dataset_normalize

In [29]:
f = lambda x : (x-x.mean())/x.std(ddof=0)

In [6]:
train_pivot_xgb_time1 = pd.read_csv('train_pivot_45678_to_9_whole_zero.csv',
                                     dtype=np.float32,usecols = predictors_target_10)

In [19]:
train_pivot_56789_to_10 = pd.read_pickle('train_pivot_56789_to_10_44fea_zero.pickle')

In [20]:
train_pivot_56789_to_10['id'] = train_pivot_56789_to_10['id'].astype(int)
train_pivot_56789_to_10.head()


Out[20]:
Semana id ruta_freq clien_freq agen_freq prod_freq agen_for_log_de ruta_for_log_de cliente_for_log_de producto_for_log_de agen_ruta_for_log_de ... LR_prod_corr t_m_5_cum t_m_4_cum t_m_3_cum t_m_2_cum t_m_1_cum NombreCliente weight weight_per_piece pieces
0 1569352 10.0 166.0 3604.0 143.0 4.001407 3.411275 2.890955 2.498162 3.411275 ... 0.106650 0.000000 0.000000 0.000000 0.000000 0.000000 131.0 691.0 NaN NaN
1 6667200 713.0 166.0 12208.0 10842.0 3.523074 3.222417 2.890955 4.361940 2.835826 ... 21.111349 3.784190 7.280697 10.714684 14.403563 18.092443 6027.0 740.0 NaN NaN
2 1592616 713.0 166.0 12208.0 10780.0 3.523074 3.222417 2.890955 3.987424 2.835826 ... 19.113272 0.000000 2.833213 6.165418 8.730368 11.949243 6027.0 480.0 NaN NaN
3 3909690 713.0 166.0 12208.0 13222.0 3.523074 3.222417 2.890955 4.529289 2.835826 ... 21.542048 4.430817 8.144389 12.461877 16.556221 20.886955 6027.0 680.0 NaN NaN
4 3659672 713.0 166.0 12208.0 10881.0 3.523074 3.222417 2.890955 3.238592 2.835826 ... 14.471490 3.583519 7.138867 10.827746 14.656388 18.319950 6027.0 567.0 NaN NaN

5 rows × 44 columns


In [6]:
train_pivot_56789_to_10.columns.values


Out[6]:
array(['id', 'ruta_freq', 'clien_freq', 'agen_freq', 'prod_freq',
       'agen_for_log_de', 'ruta_for_log_de', 'cliente_for_log_de',
       'producto_for_log_de', 'agen_ruta_for_log_de',
       'agen_cliente_for_log_de', 'agen_producto_for_log_de',
       'ruta_cliente_for_log_de', 'ruta_producto_for_log_de',
       'cliente_producto_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_agen_for_log_sum', 'corr', 't_min_1', 't_min_2',
       't_min_3', 't_min_4', 't_min_5', 't1_min_t2', 't1_min_t3',
       't1_min_t4', 't1_min_t5', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 'LR_prod', 'LR_prod_corr',
       't_m_5_cum', 't_m_4_cum', 't_m_3_cum', 't_m_2_cum', 't_m_1_cum',
       'NombreCliente', 'weight', 'weight_per_piece', 'pieces'], dtype=object)

In [7]:
train_pivot_xgb_time1.columns.values


Out[7]:
array(['ruta_freq', 'clien_freq', 'agen_freq', 'prod_freq',
       'agen_for_log_de', 'ruta_for_log_de', 'cliente_for_log_de',
       'producto_for_log_de', 'agen_ruta_for_log_de',
       'agen_cliente_for_log_de', 'agen_producto_for_log_de',
       'ruta_cliente_for_log_de', 'ruta_producto_for_log_de',
       'cliente_producto_for_log_de', 'cliente_for_log_sum',
       'cliente_producto_agen_for_log_sum', 'corr', 't_min_1', 't_min_2',
       't_min_3', 't_min_4', 't_min_5', 't1_min_t2', 't1_min_t3',
       't1_min_t4', 't1_min_t5', 't2_min_t3', 't2_min_t4', 't2_min_t5',
       't3_min_t4', 't3_min_t5', 't4_min_t5', 'LR_prod', 'LR_prod_corr',
       'target', 't_m_5_cum', 't_m_4_cum', 't_m_3_cum', 't_m_2_cum',
       't_m_1_cum', 'NombreCliente', 'weight', 'weight_per_piece', 'pieces'], dtype=object)

Begin XGBoost



In [9]:
%ls


1.5_create_lag.ipynb              pivot_train_with_nan.pickle
1_predata.ipynb                   preprocessed_products.csv
1_predata_whole.ipynb             RF_model/
3_xgb_43fea.ipynb                 ruta_for_cliente_producto.csv
3_xgb.ipynb                       stack_sub/
3_xgb_prediction.ipynb            submission_10_new.csv
3_xgb_test.ipynb                  submission_11_new.csv
4_keras_nn.ipynb                  submission_44fea.csv
5_random_forest.ipynb             submission_all_train_10.csv
6_stack_model.ipynb               submission_all_train_11.csv
7_SGD_regressor.ipynb             submission_all_train_12.csv
8_svm_linearSVR.ipynb             submission_all_train_13.csv
9_private_board.ipynb             submission_all_train_14.csv
agencia_for_cliente_producto.csv  submission_all_train_2.csv
bst_1000_eta001_2.model           submission_all_train_3.csv
bst_1000_eta001.model             submission_all_train_4.csv
bst_1000_eta002.model             submission_all_train_5.csv
bst_1000.model                    submission_all_train_6.csv
bst_1200_eta0015.model            submission_all_train_7.csv
bst_1200_eta005.model             submission_all_train_8.csv
bst_1400_eta0015.model            submission_all_train_9.csv
bst_1400_eta002.model             submission_all_train.csv
bst_1800_eta0015.model            submission_nn.csv
bst_200_eta005.model              submission_nn_xgb
bst_400_eta002.model              train_pivot_34567_to_9.csv
bst_400_eta005.model              train_pivot_45678_to_9_whole_zero.csv
bst_551_eta02.model               train_pivot_56789_to_10_44fea.pickle
bst_600_eta001.model              train_pivot_56789_to_10_44fea_zero.pickle
bst_600_eta002.model              train_pivot_56789_to_10_new.pickle
bst_600_eta005.model              train_pivot_56789_to_11_private.csv
bst_800_eta002.model              train_pivot_6789_to_11_new.pickle
bst_use_all_train.model           train_pivot_xgb_time1_44fea.csv
canal_for_cliente_producto.csv    train_pivot_xgb_time1_44fea_zero.csv
old_submission/                   train_pivot_xgb_time1.pickle
origin/                           train_pivot_xgb_time2_38fea.csv
pivot_test.pickle

In [38]:
# right now the best result comes from this parameter set with ~700 rounds
param_10 = {'booster':'gbtree',
         'nthread': 12,
         'max_depth':8, 
         'eta':0.1,
         'silent':1,
         'subsample':0.5, 
         'objective':'reg:linear',
         'eval_metric':'rmse',
         'colsample_bytree':0.7}

In [10]:
train_feature_10 = train_pivot_xgb_time1.drop(['target'],axis = 1)
train_label_10 = train_pivot_xgb_time1['target']

dtrain_10 = xgb.DMatrix(train_feature_10,label = train_label_10,missing= np.nan)

In [11]:
bst = xgb.Booster({'nthread': 8})  # init model
bst.load_model("bst_1400_eta0015.model")  # load the previously trained booster
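
For context, a file such as bst_1400_eta0015.model would have been written out by an earlier training run; a minimal hedged sketch of producing it (the exact parameters and round count used originally are an assumption) is:

    # hedged sketch: train a booster and persist it for later reuse
    bst = xgb.train(param_10, dtrain_10, num_boost_round=1400)
    bst.save_model('bst_1400_eta0015.model')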

In [67]:
bst


Out[67]:
<xgboost.core.Booster at 0x7fa7b52cbd90>

In [21]:
dtest_10 = xgb.DMatrix(train_pivot_56789_to_10.drop(['id'],axis =1), missing=np.nan)

In [35]:
submission_10_all_train = train_pivot_56789_to_10[['id']].copy()
submission_10_all_train['predict'] = bst.predict(dtest_10)
submission_10_all_train.reset_index(drop = True,inplace = True)

In [36]:
submission_10_all_train.to_csv('week10_private2.csv',index = False)

In [37]:
submission_10_all_train['predict'].describe()


Out[37]:
count    3.538385e+06
mean     1.573562e+00
std      7.086894e-01
min     -3.240995e-01
25%      1.061112e+00
50%      1.408914e+00
75%      1.898488e+00
max      7.382428e+00
Name: predict, dtype: float64
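
Note the minimum prediction is slightly negative, which is not a valid value if the target is log1p-transformed demand. A hedged sketch of the usual clean-up before building a submission (the column name demand is illustrative):

    # hedged sketch: clip the few negative predictions at 0 before inverting
    # the assumed log1p transform of the target
    submission_10_all_train['demand'] = np.expm1(submission_10_all_train['predict'].clip(lower=0))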

In [14]:
submission_10_all_train.head()


Out[14]:
Semana id predict
0 1569352 2.389577
1 6667200 3.532464
2 1592616 2.962314
3 3909690 4.135859
4 3659672 3.563721

In [15]:
submission_10_all_train.to_csv('week10_private.csv')

In [33]:
num_round = 1600
evallist = [(dtrain_10,'train')]
bst = xgb.train(param_10, dtrain_10, num_round,evallist)


[0]	train-rmse:1.35052
[1]	train-rmse:1.33273
[2]	train-rmse:1.31522
[3]	train-rmse:1.29818
[4]	train-rmse:1.2813
[5]	train-rmse:1.26485
[6]	train-rmse:1.24863
[7]	train-rmse:1.23255
[8]	train-rmse:1.21692
[9]	train-rmse:1.20137
[10]	train-rmse:1.18612
[11]	train-rmse:1.17115
[12]	train-rmse:1.1564
[13]	train-rmse:1.1419
[14]	train-rmse:1.12764
[15]	train-rmse:1.11363
[16]	train-rmse:1.09985
[17]	train-rmse:1.08633
[18]	train-rmse:1.07303
[19]	train-rmse:1.05999
[20]	train-rmse:1.04719
[21]	train-rmse:1.03474
[22]	train-rmse:1.02235
[23]	train-rmse:1.01018
[24]	train-rmse:0.998356
[25]	train-rmse:0.986818
[26]	train-rmse:0.975354
[27]	train-rmse:0.964049
[28]	train-rmse:0.952932
[29]	train-rmse:0.942135
[30]	train-rmse:0.93145
[31]	train-rmse:0.92095
[32]	train-rmse:0.910629
[33]	train-rmse:0.900503
[34]	train-rmse:0.890605
[35]	train-rmse:0.880889
[36]	train-rmse:0.871322
[37]	train-rmse:0.861935
[38]	train-rmse:0.852721
[39]	train-rmse:0.843735
[40]	train-rmse:0.834869
[41]	train-rmse:0.826338
[42]	train-rmse:0.817924
[43]	train-rmse:0.809639
[44]	train-rmse:0.801448
[45]	train-rmse:0.793433
[46]	train-rmse:0.785558
[47]	train-rmse:0.777833
[48]	train-rmse:0.77039
[49]	train-rmse:0.763075
[50]	train-rmse:0.755813
[51]	train-rmse:0.748692
[52]	train-rmse:0.741711
[53]	train-rmse:0.734963
[54]	train-rmse:0.728327
[55]	train-rmse:0.721888
[56]	train-rmse:0.715467
[57]	train-rmse:0.709168
[58]	train-rmse:0.702995
[59]	train-rmse:0.696965
[60]	train-rmse:0.691093
[61]	train-rmse:0.685299
[62]	train-rmse:0.679689
[63]	train-rmse:0.674152
[64]	train-rmse:0.668724
[65]	train-rmse:0.663415
[66]	train-rmse:0.658245
[67]	train-rmse:0.653175
[68]	train-rmse:0.648209
[69]	train-rmse:0.643406
[70]	train-rmse:0.638728
[71]	train-rmse:0.634064
[72]	train-rmse:0.629535
[73]	train-rmse:0.625164
[74]	train-rmse:0.620829
[75]	train-rmse:0.616625
[76]	train-rmse:0.61252
[77]	train-rmse:0.608489
[78]	train-rmse:0.604515
[79]	train-rmse:0.600674
[80]	train-rmse:0.596892
[81]	train-rmse:0.593182
[82]	train-rmse:0.589571
[83]	train-rmse:0.586049
[84]	train-rmse:0.582591
[85]	train-rmse:0.579232
[86]	train-rmse:0.575932
[87]	train-rmse:0.572729
[88]	train-rmse:0.569593
[89]	train-rmse:0.56654
[90]	train-rmse:0.563564
[91]	train-rmse:0.560649
[92]	train-rmse:0.557826
[93]	train-rmse:0.555055
[94]	train-rmse:0.552392
[95]	train-rmse:0.549736
[96]	train-rmse:0.547164
[97]	train-rmse:0.544661
[98]	train-rmse:0.542294
[99]	train-rmse:0.539908
[100]	train-rmse:0.537575
[101]	train-rmse:0.5353
[102]	train-rmse:0.533092
[103]	train-rmse:0.530919
[104]	train-rmse:0.528802
[105]	train-rmse:0.526762
[106]	train-rmse:0.524762
[107]	train-rmse:0.522844
[108]	train-rmse:0.52092
[109]	train-rmse:0.519087
[110]	train-rmse:0.517277
[111]	train-rmse:0.515521
[112]	train-rmse:0.5138
[113]	train-rmse:0.512169
[114]	train-rmse:0.510554
[115]	train-rmse:0.508966
[116]	train-rmse:0.507433
[117]	train-rmse:0.505914
[118]	train-rmse:0.504444
[119]	train-rmse:0.503011
[120]	train-rmse:0.50161
[121]	train-rmse:0.500246
[122]	train-rmse:0.498919
[123]	train-rmse:0.497614
[124]	train-rmse:0.496364
[125]	train-rmse:0.495143
[126]	train-rmse:0.49397
[127]	train-rmse:0.492819
[128]	train-rmse:0.491685
[129]	train-rmse:0.490604
[130]	train-rmse:0.489521
[131]	train-rmse:0.488469
[132]	train-rmse:0.487452
[133]	train-rmse:0.486468
[134]	train-rmse:0.485509
[135]	train-rmse:0.484574
[136]	train-rmse:0.483652
[137]	train-rmse:0.482753
[138]	train-rmse:0.481893
[139]	train-rmse:0.481027
[140]	train-rmse:0.480206
[141]	train-rmse:0.479391
[142]	train-rmse:0.478615
[143]	train-rmse:0.477842
[144]	train-rmse:0.477108
[145]	train-rmse:0.476404
[146]	train-rmse:0.475695
[147]	train-rmse:0.475012
[148]	train-rmse:0.474351
[149]	train-rmse:0.473713
[150]	train-rmse:0.473082
[151]	train-rmse:0.472471
[152]	train-rmse:0.471899
[153]	train-rmse:0.471327
[154]	train-rmse:0.470773
[155]	train-rmse:0.470206
[156]	train-rmse:0.469682
[157]	train-rmse:0.469162
[158]	train-rmse:0.468651
[159]	train-rmse:0.468145
[160]	train-rmse:0.467662
[161]	train-rmse:0.467169
[162]	train-rmse:0.466712
[163]	train-rmse:0.466268
[164]	train-rmse:0.465819
[165]	train-rmse:0.465385
[166]	train-rmse:0.464976
[167]	train-rmse:0.464587
[168]	train-rmse:0.464199
[169]	train-rmse:0.463817
[170]	train-rmse:0.463429
[171]	train-rmse:0.46307
[172]	train-rmse:0.46272
[173]	train-rmse:0.462364
[174]	train-rmse:0.462023
[175]	train-rmse:0.46169
[176]	train-rmse:0.461373
[177]	train-rmse:0.461077
[178]	train-rmse:0.460768
[179]	train-rmse:0.460476
[180]	train-rmse:0.460183
[181]	train-rmse:0.459901
[182]	train-rmse:0.459621
[183]	train-rmse:0.459357
[184]	train-rmse:0.459094
[185]	train-rmse:0.458831
[186]	train-rmse:0.45858
[187]	train-rmse:0.458336
[188]	train-rmse:0.458092
[189]	train-rmse:0.457862
[190]	train-rmse:0.457632
[191]	train-rmse:0.457416
[192]	train-rmse:0.457199
[193]	train-rmse:0.456989
[194]	train-rmse:0.456791
[195]	train-rmse:0.4566
[196]	train-rmse:0.456401
[197]	train-rmse:0.456206
[198]	train-rmse:0.456031
[199]	train-rmse:0.455857
[200]	train-rmse:0.455676
[201]	train-rmse:0.455495
[202]	train-rmse:0.455335
[203]	train-rmse:0.455176
[204]	train-rmse:0.455011
[205]	train-rmse:0.454858
[206]	train-rmse:0.454699
[207]	train-rmse:0.454543
[208]	train-rmse:0.454399
[209]	train-rmse:0.454259
[210]	train-rmse:0.454113
[211]	train-rmse:0.453972
[212]	train-rmse:0.453841
[213]	train-rmse:0.453717
[214]	train-rmse:0.453594
[215]	train-rmse:0.453471
[216]	train-rmse:0.453359
[217]	train-rmse:0.453236
[218]	train-rmse:0.453113
[219]	train-rmse:0.45301
[220]	train-rmse:0.452901
[221]	train-rmse:0.452793
[222]	train-rmse:0.452669
[223]	train-rmse:0.452554
[224]	train-rmse:0.452443
[225]	train-rmse:0.45235
[226]	train-rmse:0.452234
[227]	train-rmse:0.452132
[228]	train-rmse:0.452047
[229]	train-rmse:0.451944
[230]	train-rmse:0.45184
[231]	train-rmse:0.451745
[232]	train-rmse:0.451655
[233]	train-rmse:0.451572
[234]	train-rmse:0.451483
[235]	train-rmse:0.451395
[236]	train-rmse:0.451298
[237]	train-rmse:0.451218
[238]	train-rmse:0.451152
[239]	train-rmse:0.451062
[240]	train-rmse:0.450993
[241]	train-rmse:0.450912
[242]	train-rmse:0.450837
[243]	train-rmse:0.450754
[244]	train-rmse:0.450689
[245]	train-rmse:0.450621
[246]	train-rmse:0.450544
[247]	train-rmse:0.450485
[248]	train-rmse:0.450406
[249]	train-rmse:0.450344
[250]	train-rmse:0.450276
[251]	train-rmse:0.4502
[252]	train-rmse:0.450137
[253]	train-rmse:0.450073
[254]	train-rmse:0.450016
[255]	train-rmse:0.449958
[256]	train-rmse:0.449896
[257]	train-rmse:0.449833
[258]	train-rmse:0.449778
[259]	train-rmse:0.44973
[260]	train-rmse:0.449669
[261]	train-rmse:0.449609
[262]	train-rmse:0.449556
[263]	train-rmse:0.449499
[264]	train-rmse:0.449441
[265]	train-rmse:0.449383
[266]	train-rmse:0.449338
[267]	train-rmse:0.449295
[268]	train-rmse:0.449249
[269]	train-rmse:0.449203
[270]	train-rmse:0.449161
[271]	train-rmse:0.449108
[272]	train-rmse:0.449064
[273]	train-rmse:0.449001
[274]	train-rmse:0.448946
[275]	train-rmse:0.4489
[276]	train-rmse:0.44885
[277]	train-rmse:0.448814
[278]	train-rmse:0.448766
[279]	train-rmse:0.448714
[280]	train-rmse:0.448672
[281]	train-rmse:0.448632
[282]	train-rmse:0.448587
[283]	train-rmse:0.448556
[284]	train-rmse:0.448516
[285]	train-rmse:0.44847
[286]	train-rmse:0.448433
[287]	train-rmse:0.448396
[288]	train-rmse:0.448365
[289]	train-rmse:0.448325
[290]	train-rmse:0.448282
[291]	train-rmse:0.448256
[292]	train-rmse:0.448212
[293]	train-rmse:0.448171
[294]	train-rmse:0.448128
[295]	train-rmse:0.448097
[296]	train-rmse:0.448054
[297]	train-rmse:0.448023
[298]	train-rmse:0.447988
[299]	train-rmse:0.447952
[300]	train-rmse:0.447915
[301]	train-rmse:0.447886
[302]	train-rmse:0.447858
[303]	train-rmse:0.447817
[304]	train-rmse:0.447798
[305]	train-rmse:0.44775
[306]	train-rmse:0.447718
[307]	train-rmse:0.447684
[308]	train-rmse:0.447649
[309]	train-rmse:0.447619
[310]	train-rmse:0.447584
[311]	train-rmse:0.44755
[312]	train-rmse:0.447526
[313]	train-rmse:0.447496
[314]	train-rmse:0.447467
[315]	train-rmse:0.447444
[316]	train-rmse:0.447416
[317]	train-rmse:0.447391
[318]	train-rmse:0.447357
[319]	train-rmse:0.447333
[320]	train-rmse:0.447302
[321]	train-rmse:0.447277
[322]	train-rmse:0.447245
[323]	train-rmse:0.447214
[324]	train-rmse:0.447188
[325]	train-rmse:0.44715
[326]	train-rmse:0.447119
[327]	train-rmse:0.447075
[328]	train-rmse:0.447039
[329]	train-rmse:0.447009
[330]	train-rmse:0.446983
[331]	train-rmse:0.446961
[332]	train-rmse:0.446932
[333]	train-rmse:0.446897
[334]	train-rmse:0.44687
[335]	train-rmse:0.446843
[336]	train-rmse:0.446823
[337]	train-rmse:0.446788
[338]	train-rmse:0.446771
[339]	train-rmse:0.446752
[340]	train-rmse:0.446716
[341]	train-rmse:0.446691
[342]	train-rmse:0.446654
[343]	train-rmse:0.446636
[344]	train-rmse:0.446605
[345]	train-rmse:0.446585
[346]	train-rmse:0.44656
[347]	train-rmse:0.446534
[348]	train-rmse:0.446515
[349]	train-rmse:0.446491
[350]	train-rmse:0.446464
[351]	train-rmse:0.446443
[352]	train-rmse:0.446405
[353]	train-rmse:0.446376
[354]	train-rmse:0.446358
[355]	train-rmse:0.44633
[356]	train-rmse:0.446308
[357]	train-rmse:0.446285
[358]	train-rmse:0.446265
[359]	train-rmse:0.446238
[360]	train-rmse:0.446219
[361]	train-rmse:0.446198
[362]	train-rmse:0.446177
[363]	train-rmse:0.44616
[364]	train-rmse:0.446125
[365]	train-rmse:0.446105
[366]	train-rmse:0.44608
[367]	train-rmse:0.446047
[368]	train-rmse:0.446026
[369]	train-rmse:0.446001
[370]	train-rmse:0.445976
[371]	train-rmse:0.445947
[372]	train-rmse:0.445925
[373]	train-rmse:0.445906
[374]	train-rmse:0.445881
[375]	train-rmse:0.445864
[376]	train-rmse:0.445844
[377]	train-rmse:0.445827
[378]	train-rmse:0.44581
[379]	train-rmse:0.445788
[380]	train-rmse:0.445776
[381]	train-rmse:0.445757
[382]	train-rmse:0.445738
[383]	train-rmse:0.445717
[384]	train-rmse:0.445701
[385]	train-rmse:0.445685
[386]	train-rmse:0.445673
[387]	train-rmse:0.445651
[388]	train-rmse:0.445629
[389]	train-rmse:0.445606
[390]	train-rmse:0.44559
[391]	train-rmse:0.445577
[392]	train-rmse:0.445559
[393]	train-rmse:0.445531
[394]	train-rmse:0.445516
[395]	train-rmse:0.445493
[396]	train-rmse:0.445471
[397]	train-rmse:0.445439
[398]	train-rmse:0.445424
[399]	train-rmse:0.445412
[400]	train-rmse:0.445393
[401]	train-rmse:0.445377
[402]	train-rmse:0.445366
[403]	train-rmse:0.445348
[404]	train-rmse:0.445335
[405]	train-rmse:0.445312
[406]	train-rmse:0.4453
[407]	train-rmse:0.445282
[408]	train-rmse:0.445273
[409]	train-rmse:0.445254
[410]	train-rmse:0.445239
[411]	train-rmse:0.445216
[412]	train-rmse:0.445199
[413]	train-rmse:0.445179
[414]	train-rmse:0.445158
[415]	train-rmse:0.445139
[416]	train-rmse:0.445118
[417]	train-rmse:0.445102
[418]	train-rmse:0.445075
[419]	train-rmse:0.445062
[420]	train-rmse:0.445042
[421]	train-rmse:0.445026
[422]	train-rmse:0.445006
[423]	train-rmse:0.444975
[424]	train-rmse:0.444948
[425]	train-rmse:0.444934
[426]	train-rmse:0.444922
[427]	train-rmse:0.444898
[428]	train-rmse:0.444876
[429]	train-rmse:0.444862
[430]	train-rmse:0.444845
[431]	train-rmse:0.444825
[432]	train-rmse:0.444811
[433]	train-rmse:0.444799
[434]	train-rmse:0.444782
[435]	train-rmse:0.444769
[436]	train-rmse:0.444756
[437]	train-rmse:0.444738
[438]	train-rmse:0.44472
[439]	train-rmse:0.444699
[440]	train-rmse:0.444688
[441]	train-rmse:0.444666
[442]	train-rmse:0.444653
[443]	train-rmse:0.444631
[444]	train-rmse:0.444611
[445]	train-rmse:0.444587
[446]	train-rmse:0.444567
[447]	train-rmse:0.44454
[448]	train-rmse:0.444526
[449]	train-rmse:0.44451
[450]	train-rmse:0.444497
[451]	train-rmse:0.444479
[452]	train-rmse:0.444465
[453]	train-rmse:0.444452
[454]	train-rmse:0.444441
[455]	train-rmse:0.444427
[456]	train-rmse:0.444404
[457]	train-rmse:0.444387
[458]	train-rmse:0.444372
[459]	train-rmse:0.444361
[460]	train-rmse:0.444337
[461]	train-rmse:0.444324
[462]	train-rmse:0.444309
[463]	train-rmse:0.444299
[464]	train-rmse:0.444286
[465]	train-rmse:0.444268
[466]	train-rmse:0.444256
[467]	train-rmse:0.444242
[468]	train-rmse:0.444217
[469]	train-rmse:0.444205
[470]	train-rmse:0.444193
[471]	train-rmse:0.444181
[472]	train-rmse:0.444168
[473]	train-rmse:0.444157
[474]	train-rmse:0.444142
[475]	train-rmse:0.444124
[476]	train-rmse:0.444107
[477]	train-rmse:0.444097
[478]	train-rmse:0.444079
[479]	train-rmse:0.44407
[480]	train-rmse:0.444064
[481]	train-rmse:0.444055
[482]	train-rmse:0.444042
[483]	train-rmse:0.444026
[484]	train-rmse:0.444018
[485]	train-rmse:0.444
[486]	train-rmse:0.443986
[487]	train-rmse:0.443967
[488]	train-rmse:0.443951
[489]	train-rmse:0.443941
[490]	train-rmse:0.443928
[491]	train-rmse:0.443916
[492]	train-rmse:0.4439
[493]	train-rmse:0.443884
[494]	train-rmse:0.443873
[495]	train-rmse:0.44386
[496]	train-rmse:0.443842
[497]	train-rmse:0.443831
[498]	train-rmse:0.443817
[499]	train-rmse:0.443806
[500]	train-rmse:0.44379
[501]	train-rmse:0.443778
[502]	train-rmse:0.443765
[503]	train-rmse:0.443759
[504]	train-rmse:0.443749
[505]	train-rmse:0.443732
[506]	train-rmse:0.443718
[507]	train-rmse:0.443708
[508]	train-rmse:0.443689
[509]	train-rmse:0.443679
[510]	train-rmse:0.443661
[511]	train-rmse:0.443654
[512]	train-rmse:0.443645
[513]	train-rmse:0.443631
[514]	train-rmse:0.443621
[515]	train-rmse:0.443612
[516]	train-rmse:0.443601
[517]	train-rmse:0.443588
[518]	train-rmse:0.44358
[519]	train-rmse:0.443574
[520]	train-rmse:0.443559
[521]	train-rmse:0.443548
[522]	train-rmse:0.443537
[523]	train-rmse:0.443527
[524]	train-rmse:0.443511
[525]	train-rmse:0.443501
[526]	train-rmse:0.443491
[527]	train-rmse:0.443477
[528]	train-rmse:0.443463
[529]	train-rmse:0.443453
[530]	train-rmse:0.443443
[531]	train-rmse:0.443429
[532]	train-rmse:0.443413
[533]	train-rmse:0.443403
[534]	train-rmse:0.443393
[535]	train-rmse:0.443383
[536]	train-rmse:0.443374
[537]	train-rmse:0.443361
[538]	train-rmse:0.443351
[539]	train-rmse:0.443344
[540]	train-rmse:0.443335
[541]	train-rmse:0.443319
[542]	train-rmse:0.443311
[543]	train-rmse:0.443305
[544]	train-rmse:0.443285
[545]	train-rmse:0.443275
[546]	train-rmse:0.44326
[547]	train-rmse:0.443249
[548]	train-rmse:0.443235
[549]	train-rmse:0.443225
[550]	train-rmse:0.443217
[551]	train-rmse:0.443207
[552]	train-rmse:0.443199
[553]	train-rmse:0.443185
[554]	train-rmse:0.443177
[555]	train-rmse:0.443162
[556]	train-rmse:0.443145
[557]	train-rmse:0.443129
[558]	train-rmse:0.443115
[559]	train-rmse:0.443105
[560]	train-rmse:0.443095
[561]	train-rmse:0.443081
[562]	train-rmse:0.443067
[563]	train-rmse:0.443054
[564]	train-rmse:0.443046
[565]	train-rmse:0.443032
[566]	train-rmse:0.443022
[567]	train-rmse:0.443012
[568]	train-rmse:0.442994
[569]	train-rmse:0.442987
[570]	train-rmse:0.442974
[571]	train-rmse:0.442966
[572]	train-rmse:0.442954
[573]	train-rmse:0.442946
[574]	train-rmse:0.44293
[575]	train-rmse:0.442922
[576]	train-rmse:0.44291
[577]	train-rmse:0.442896
[578]	train-rmse:0.442885
[579]	train-rmse:0.442878
[580]	train-rmse:0.44287
[581]	train-rmse:0.442856
[582]	train-rmse:0.442847
[583]	train-rmse:0.442839
[584]	train-rmse:0.442829
[585]	train-rmse:0.44282
[586]	train-rmse:0.442804
[587]	train-rmse:0.442786
[588]	train-rmse:0.442775
[589]	train-rmse:0.442768
[590]	train-rmse:0.442755
[591]	train-rmse:0.442744
[592]	train-rmse:0.442736
[593]	train-rmse:0.442732
[594]	train-rmse:0.442725
[595]	train-rmse:0.44272
[596]	train-rmse:0.442711
[597]	train-rmse:0.442703
[598]	train-rmse:0.442694
[599]	train-rmse:0.442685
[600]	train-rmse:0.442676
[601]	train-rmse:0.442668
[602]	train-rmse:0.442653
[603]	train-rmse:0.442641
[604]	train-rmse:0.442627
[605]	train-rmse:0.442619
[606]	train-rmse:0.442604
[607]	train-rmse:0.442594
[608]	train-rmse:0.442578
[609]	train-rmse:0.442562
[610]	train-rmse:0.442554
[611]	train-rmse:0.442545
[612]	train-rmse:0.442535
[613]	train-rmse:0.442524
[614]	train-rmse:0.442519
[615]	train-rmse:0.442512
[616]	train-rmse:0.442504
[617]	train-rmse:0.442495
[618]	train-rmse:0.442481
[619]	train-rmse:0.442476
[620]	train-rmse:0.442458
[621]	train-rmse:0.442447
[622]	train-rmse:0.442439
[623]	train-rmse:0.442431
[624]	train-rmse:0.442425
[625]	train-rmse:0.442418
[626]	train-rmse:0.442407
[627]	train-rmse:0.442398
[628]	train-rmse:0.442386
[629]	train-rmse:0.442375
[630]	train-rmse:0.442364
[631]	train-rmse:0.442357
[632]	train-rmse:0.442351
[633]	train-rmse:0.442345
[634]	train-rmse:0.442333
[635]	train-rmse:0.442321
[636]	train-rmse:0.442311
[637]	train-rmse:0.442305
[638]	train-rmse:0.442299
[639]	train-rmse:0.442295
[640]	train-rmse:0.442285
[641]	train-rmse:0.442277
[642]	train-rmse:0.442271
[643]	train-rmse:0.442262
[644]	train-rmse:0.442253
[645]	train-rmse:0.442243
[646]	train-rmse:0.442233
[647]	train-rmse:0.44222
[648]	train-rmse:0.442212
[649]	train-rmse:0.442208
[650]	train-rmse:0.442198
[651]	train-rmse:0.442191
[652]	train-rmse:0.442186
[653]	train-rmse:0.442176
[654]	train-rmse:0.442167
[655]	train-rmse:0.442159
[656]	train-rmse:0.442153
[657]	train-rmse:0.442147
[658]	train-rmse:0.442134
[659]	train-rmse:0.442123
[660]	train-rmse:0.442116
[661]	train-rmse:0.442106
[662]	train-rmse:0.442099
[663]	train-rmse:0.442092
[664]	train-rmse:0.442085
[665]	train-rmse:0.442069
[666]	train-rmse:0.442056
[667]	train-rmse:0.442048
[668]	train-rmse:0.442037
[669]	train-rmse:0.442031
[670]	train-rmse:0.442024
[671]	train-rmse:0.442013
[672]	train-rmse:0.442006
[673]	train-rmse:0.441996
[674]	train-rmse:0.441987
[675]	train-rmse:0.441981
[676]	train-rmse:0.441971
[677]	train-rmse:0.441966
[678]	train-rmse:0.441954
[679]	train-rmse:0.441946
[680]	train-rmse:0.441941
[681]	train-rmse:0.441934
[682]	train-rmse:0.441929
[683]	train-rmse:0.441924
[684]	train-rmse:0.44191
[685]	train-rmse:0.441904
[686]	train-rmse:0.441891
[687]	train-rmse:0.441886
[688]	train-rmse:0.441869
[689]	train-rmse:0.44186
[690]	train-rmse:0.441852
[691]	train-rmse:0.441843
[692]	train-rmse:0.441832
[693]	train-rmse:0.441824
[694]	train-rmse:0.441813
[695]	train-rmse:0.441808
[696]	train-rmse:0.441799
[697]	train-rmse:0.441794
[698]	train-rmse:0.441786
[699]	train-rmse:0.441779
[700]	train-rmse:0.441771
[701]	train-rmse:0.441761
[702]	train-rmse:0.441755
[703]	train-rmse:0.441739
[704]	train-rmse:0.441734
[705]	train-rmse:0.441725
[706]	train-rmse:0.441715
[707]	train-rmse:0.441704
[708]	train-rmse:0.441697
[709]	train-rmse:0.44169
[710]	train-rmse:0.441683
[711]	train-rmse:0.441679
[712]	train-rmse:0.441669
[713]	train-rmse:0.441663
[714]	train-rmse:0.441654
[715]	train-rmse:0.441646
[716]	train-rmse:0.44164
[717]	train-rmse:0.441634
[718]	train-rmse:0.441628
[719]	train-rmse:0.441622
[720]	train-rmse:0.441615
[721]	train-rmse:0.44161
[722]	train-rmse:0.441604
[723]	train-rmse:0.441596
[724]	train-rmse:0.441587
[725]	train-rmse:0.441577
[726]	train-rmse:0.441569
[727]	train-rmse:0.44156
[728]	train-rmse:0.441554
[729]	train-rmse:0.441546
[730]	train-rmse:0.441538
[731]	train-rmse:0.44153
[732]	train-rmse:0.441523
[733]	train-rmse:0.441514
[734]	train-rmse:0.441504
[735]	train-rmse:0.441497
[736]	train-rmse:0.441493
[737]	train-rmse:0.441485
[738]	train-rmse:0.441479
[739]	train-rmse:0.441474
[740]	train-rmse:0.441464
[741]	train-rmse:0.441459
[742]	train-rmse:0.44145
[743]	train-rmse:0.441441
[744]	train-rmse:0.441428
[745]	train-rmse:0.441421
[746]	train-rmse:0.441411
[747]	train-rmse:0.441405
[748]	train-rmse:0.441396
[749]	train-rmse:0.441389
[750]	train-rmse:0.441379
[751]	train-rmse:0.441375
[752]	train-rmse:0.441365
[753]	train-rmse:0.441358
[754]	train-rmse:0.441349
[755]	train-rmse:0.441341
[756]	train-rmse:0.441335
[757]	train-rmse:0.441328
[758]	train-rmse:0.441322
[759]	train-rmse:0.441315
[760]	train-rmse:0.441308
[761]	train-rmse:0.441301
[762]	train-rmse:0.441292
[763]	train-rmse:0.441286
[764]	train-rmse:0.441275
[765]	train-rmse:0.441264
[766]	train-rmse:0.44125
[767]	train-rmse:0.441245
[768]	train-rmse:0.441241
[769]	train-rmse:0.441235
[770]	train-rmse:0.441228
[771]	train-rmse:0.44122
[772]	train-rmse:0.441215
[773]	train-rmse:0.441204
[774]	train-rmse:0.441195
[775]	train-rmse:0.441186
[776]	train-rmse:0.441182
[777]	train-rmse:0.441177
[778]	train-rmse:0.441171
[779]	train-rmse:0.441164
[780]	train-rmse:0.441153
[781]	train-rmse:0.441148
[782]	train-rmse:0.441142
[783]	train-rmse:0.441136
[784]	train-rmse:0.44113
[785]	train-rmse:0.44112
[786]	train-rmse:0.441116
[787]	train-rmse:0.441108
[788]	train-rmse:0.441101
[789]	train-rmse:0.441096
[790]	train-rmse:0.441091
[791]	train-rmse:0.441085
[792]	train-rmse:0.441079
[793]	train-rmse:0.441073
[794]	train-rmse:0.441067
[795]	train-rmse:0.441061
[796]	train-rmse:0.441054
[797]	train-rmse:0.441049
[798]	train-rmse:0.44104
[799]	train-rmse:0.441035
[800]	train-rmse:0.441027
[801]	train-rmse:0.441022
[802]	train-rmse:0.441017
[803]	train-rmse:0.44101
[804]	train-rmse:0.441004
[805]	train-rmse:0.440999
[806]	train-rmse:0.440995
[807]	train-rmse:0.440987
[808]	train-rmse:0.440982
[809]	train-rmse:0.440976
[810]	train-rmse:0.440971
[811]	train-rmse:0.440965
[812]	train-rmse:0.440962
[813]	train-rmse:0.440955
[814]	train-rmse:0.440946
[815]	train-rmse:0.440938
[816]	train-rmse:0.440931
[817]	train-rmse:0.440925
[818]	train-rmse:0.440917
[819]	train-rmse:0.440912
[820]	train-rmse:0.440906
[821]	train-rmse:0.440899
[822]	train-rmse:0.440894
[823]	train-rmse:0.440888
[824]	train-rmse:0.440881
[825]	train-rmse:0.440878
[826]	train-rmse:0.440875
[827]	train-rmse:0.440869
[828]	train-rmse:0.440864
[829]	train-rmse:0.440858
[830]	train-rmse:0.440852
[831]	train-rmse:0.440842
[832]	train-rmse:0.440835
[833]	train-rmse:0.440828
[834]	train-rmse:0.440823
[835]	train-rmse:0.440815
[836]	train-rmse:0.440809
[837]	train-rmse:0.440803
[838]	train-rmse:0.440798
[839]	train-rmse:0.44079
[840]	train-rmse:0.440782
[841]	train-rmse:0.440775
[842]	train-rmse:0.44077
[843]	train-rmse:0.440765
[844]	train-rmse:0.440762
[845]	train-rmse:0.440755
[846]	train-rmse:0.440746
[847]	train-rmse:0.440742
[848]	train-rmse:0.440739
[849]	train-rmse:0.440732
[850]	train-rmse:0.440726
[851]	train-rmse:0.440721
[852]	train-rmse:0.440715
[853]	train-rmse:0.44071
[854]	train-rmse:0.440705
[855]	train-rmse:0.440696
[856]	train-rmse:0.440689
[857]	train-rmse:0.440685
[858]	train-rmse:0.440681
[859]	train-rmse:0.440676
[860]	train-rmse:0.440668
[861]	train-rmse:0.440658
[862]	train-rmse:0.440654
[863]	train-rmse:0.440648
[864]	train-rmse:0.440641
[865]	train-rmse:0.440632
[866]	train-rmse:0.440622
[867]	train-rmse:0.440616
[868]	train-rmse:0.440612
[869]	train-rmse:0.440607
[870]	train-rmse:0.440602
[871]	train-rmse:0.440595
[872]	train-rmse:0.440588
[873]	train-rmse:0.440586
[874]	train-rmse:0.440581
[875]	train-rmse:0.440575
[876]	train-rmse:0.44057
[877]	train-rmse:0.440566
[878]	train-rmse:0.440559
[879]	train-rmse:0.440553
[880]	train-rmse:0.440548
[881]	train-rmse:0.440535
[882]	train-rmse:0.440529
[883]	train-rmse:0.440522
[884]	train-rmse:0.440518
[885]	train-rmse:0.440508
[886]	train-rmse:0.440504
[887]	train-rmse:0.440496
[888]	train-rmse:0.440493
[889]	train-rmse:0.440486
[890]	train-rmse:0.440483
[891]	train-rmse:0.440477
[892]	train-rmse:0.440469
[893]	train-rmse:0.440464
[894]	train-rmse:0.440458
[895]	train-rmse:0.440454
[896]	train-rmse:0.440449
[897]	train-rmse:0.440443
[898]	train-rmse:0.440438
[899]	train-rmse:0.440433
[900]	train-rmse:0.44043
[901]	train-rmse:0.440424
[902]	train-rmse:0.440416
[903]	train-rmse:0.440411
[904]	train-rmse:0.440408
[905]	train-rmse:0.440401
[906]	train-rmse:0.440389
[907]	train-rmse:0.440382
[908]	train-rmse:0.440376
[909]	train-rmse:0.440372
[910]	train-rmse:0.440366
[911]	train-rmse:0.440359
[912]	train-rmse:0.440352
[913]	train-rmse:0.44035
[914]	train-rmse:0.440344
[915]	train-rmse:0.44034
[916]	train-rmse:0.440334
[917]	train-rmse:0.44033
[918]	train-rmse:0.440322
[919]	train-rmse:0.440318
[920]	train-rmse:0.440312
[921]	train-rmse:0.440305
[922]	train-rmse:0.4403
[923]	train-rmse:0.440295
[924]	train-rmse:0.44029
[925]	train-rmse:0.440282
[926]	train-rmse:0.440278
[927]	train-rmse:0.440273
[928]	train-rmse:0.440265
[929]	train-rmse:0.440259
[930]	train-rmse:0.440255
[931]	train-rmse:0.440246
[932]	train-rmse:0.440243
[933]	train-rmse:0.440237
[934]	train-rmse:0.440231
[935]	train-rmse:0.440222
[936]	train-rmse:0.440218
[937]	train-rmse:0.440214
[938]	train-rmse:0.440209
[939]	train-rmse:0.440204
[940]	train-rmse:0.440198
[941]	train-rmse:0.440192
[942]	train-rmse:0.440188
[943]	train-rmse:0.440182
[944]	train-rmse:0.440178
[945]	train-rmse:0.440171
[946]	train-rmse:0.440166
[947]	train-rmse:0.44016
[948]	train-rmse:0.440151
[949]	train-rmse:0.440145
[950]	train-rmse:0.440142
[951]	train-rmse:0.440136
[952]	train-rmse:0.440132
[953]	train-rmse:0.440127
[954]	train-rmse:0.440123
[955]	train-rmse:0.440117
[956]	train-rmse:0.440113
[957]	train-rmse:0.440108
[958]	train-rmse:0.440103
[959]	train-rmse:0.440096
[960]	train-rmse:0.44009
[961]	train-rmse:0.440088
[962]	train-rmse:0.440083
[963]	train-rmse:0.440076
[964]	train-rmse:0.440069
[965]	train-rmse:0.440061
[966]	train-rmse:0.440054
[967]	train-rmse:0.440047
[968]	train-rmse:0.440039
[969]	train-rmse:0.440034
[970]	train-rmse:0.440029
[971]	train-rmse:0.440024
[972]	train-rmse:0.440019
[973]	train-rmse:0.440016
[974]	train-rmse:0.44001
[975]	train-rmse:0.440002
[976]	train-rmse:0.439999
[977]	train-rmse:0.439995
[978]	train-rmse:0.439987
[979]	train-rmse:0.43998
[980]	train-rmse:0.439976
[981]	train-rmse:0.439973
[982]	train-rmse:0.439967
[983]	train-rmse:0.43996
[984]	train-rmse:0.439954
[985]	train-rmse:0.439946
[986]	train-rmse:0.439942
[987]	train-rmse:0.439933
[988]	train-rmse:0.439927
[989]	train-rmse:0.439923
[990]	train-rmse:0.439919
[991]	train-rmse:0.439912
[992]	train-rmse:0.439909
[993]	train-rmse:0.439905
[994]	train-rmse:0.439898
[995]	train-rmse:0.439892
[996]	train-rmse:0.439887
[997]	train-rmse:0.439883
[998]	train-rmse:0.43988
[999]	train-rmse:0.439876
[1000]	train-rmse:0.43987
[1001]	train-rmse:0.439865
[1002]	train-rmse:0.439862
[1003]	train-rmse:0.439857
[1004]	train-rmse:0.439852
[1005]	train-rmse:0.439848
[1006]	train-rmse:0.439843
[1007]	train-rmse:0.439838
[1008]	train-rmse:0.439834
[1009]	train-rmse:0.439831
[1010]	train-rmse:0.439823
[1011]	train-rmse:0.439821
[1012]	train-rmse:0.439817
[1013]	train-rmse:0.439813
[1014]	train-rmse:0.439807
[1015]	train-rmse:0.4398
[1016]	train-rmse:0.439797
[1017]	train-rmse:0.439792
[1018]	train-rmse:0.439788
[1019]	train-rmse:0.439786
[1020]	train-rmse:0.439783
[1021]	train-rmse:0.439775
[1022]	train-rmse:0.439771
[1023]	train-rmse:0.439764
[1024]	train-rmse:0.439761
[1025]	train-rmse:0.439757
[1026]	train-rmse:0.439751
[1027]	train-rmse:0.439747
[1028]	train-rmse:0.439742
[1029]	train-rmse:0.439736
[1030]	train-rmse:0.439732
[1031]	train-rmse:0.439729
[1032]	train-rmse:0.439721
[1033]	train-rmse:0.439718
[1034]	train-rmse:0.439714
[1035]	train-rmse:0.43971
[1036]	train-rmse:0.439704
[1037]	train-rmse:0.439701
[1038]	train-rmse:0.439696
[1039]	train-rmse:0.439689
[1040]	train-rmse:0.439686
[1041]	train-rmse:0.439683
[1042]	train-rmse:0.439679
[1043]	train-rmse:0.439674
[1044]	train-rmse:0.43967
[1045]	train-rmse:0.439668
[1046]	train-rmse:0.43966
[1047]	train-rmse:0.439654
[1048]	train-rmse:0.439649
[1049]	train-rmse:0.439646
[1050]	train-rmse:0.439641
[1051]	train-rmse:0.439636
[1052]	train-rmse:0.439633
[1053]	train-rmse:0.439629
[1054]	train-rmse:0.439623
[1055]	train-rmse:0.439617
[1056]	train-rmse:0.439614
[1057]	train-rmse:0.439608
[1058]	train-rmse:0.439603
[1059]	train-rmse:0.439598
[1060]	train-rmse:0.439595
[1061]	train-rmse:0.439591
[1062]	train-rmse:0.439586
[1063]	train-rmse:0.439579
[1064]	train-rmse:0.439571
[1065]	train-rmse:0.439568
[1066]	train-rmse:0.439561
[1067]	train-rmse:0.439553
[1068]	train-rmse:0.439548
[1069]	train-rmse:0.439545
[1070]	train-rmse:0.43954
[1071]	train-rmse:0.439537
[1072]	train-rmse:0.439528
[1073]	train-rmse:0.439521
[1074]	train-rmse:0.439516
[1075]	train-rmse:0.439512
[1076]	train-rmse:0.439506
[1077]	train-rmse:0.439501
[1078]	train-rmse:0.439497
[1079]	train-rmse:0.439495
[1080]	train-rmse:0.439492
[1081]	train-rmse:0.439487
[1082]	train-rmse:0.439482
[1083]	train-rmse:0.439477
[1084]	train-rmse:0.439474
[1085]	train-rmse:0.43947
[1086]	train-rmse:0.439463
[1087]	train-rmse:0.43946
[1088]	train-rmse:0.439456
[1089]	train-rmse:0.439452
[1090]	train-rmse:0.439448
[1091]	train-rmse:0.439444
[1092]	train-rmse:0.439439
[1093]	train-rmse:0.439436
[1094]	train-rmse:0.439432
[1095]	train-rmse:0.439427
[1096]	train-rmse:0.439421
[1097]	train-rmse:0.439415
[1098]	train-rmse:0.439409
[1099]	train-rmse:0.439404
[1100]	train-rmse:0.4394
[1101]	train-rmse:0.439393
[1102]	train-rmse:0.439387
[1103]	train-rmse:0.43938
[1104]	train-rmse:0.439374
[1105]	train-rmse:0.439369
[1106]	train-rmse:0.439365
[1107]	train-rmse:0.439362
[1108]	train-rmse:0.439355
[1109]	train-rmse:0.43935
[1110]	train-rmse:0.439344
[1111]	train-rmse:0.439339
[1112]	train-rmse:0.439336
[1113]	train-rmse:0.439328
[1114]	train-rmse:0.439325
[1115]	train-rmse:0.439321
[1116]	train-rmse:0.439315
[1117]	train-rmse:0.439311
[1118]	train-rmse:0.439302
[1119]	train-rmse:0.439297
[1120]	train-rmse:0.439291
[1121]	train-rmse:0.43929
[1122]	train-rmse:0.439283
[1123]	train-rmse:0.439281
[1124]	train-rmse:0.439274
[1125]	train-rmse:0.439269
[1126]	train-rmse:0.439262
[1127]	train-rmse:0.439259
[1128]	train-rmse:0.439256
[1129]	train-rmse:0.439253
[1130]	train-rmse:0.439247
[1131]	train-rmse:0.439242
[1132]	train-rmse:0.439239
[1133]	train-rmse:0.439234
[1134]	train-rmse:0.43923
[1135]	train-rmse:0.439227
[1136]	train-rmse:0.439224
[1137]	train-rmse:0.439219
[1138]	train-rmse:0.439216
[1139]	train-rmse:0.439212
[1140]	train-rmse:0.439208
[1141]	train-rmse:0.439203
[1142]	train-rmse:0.439199
[1143]	train-rmse:0.439195
[1144]	train-rmse:0.43919
[1145]	train-rmse:0.439184
[1146]	train-rmse:0.439179
[1147]	train-rmse:0.439176
[1148]	train-rmse:0.439173
[1149]	train-rmse:0.43917
[1150]	train-rmse:0.439163
[1151]	train-rmse:0.43916
[1152]	train-rmse:0.439157
[1153]	train-rmse:0.439152
[1154]	train-rmse:0.439144
[1155]	train-rmse:0.439139
[1156]	train-rmse:0.439135
[1157]	train-rmse:0.439131
[1158]	train-rmse:0.439127
[1159]	train-rmse:0.439123
[1160]	train-rmse:0.439118
[1161]	train-rmse:0.439115
[1162]	train-rmse:0.439113
[1163]	train-rmse:0.439109
[1164]	train-rmse:0.439105
[1165]	train-rmse:0.4391
[1166]	train-rmse:0.439096
[1167]	train-rmse:0.439091
[1168]	train-rmse:0.439085
[1169]	train-rmse:0.43908
[1170]	train-rmse:0.439075
[1171]	train-rmse:0.439069
[1172]	train-rmse:0.439064
[1173]	train-rmse:0.439057
[1174]	train-rmse:0.439051
[1175]	train-rmse:0.439047
[1176]	train-rmse:0.439043
[1177]	train-rmse:0.439036
[1178]	train-rmse:0.439029
[1179]	train-rmse:0.439025
[1180]	train-rmse:0.439019
[1181]	train-rmse:0.439016
[1182]	train-rmse:0.439012
[1183]	train-rmse:0.439006
[1184]	train-rmse:0.439
[1185]	train-rmse:0.438997
[1186]	train-rmse:0.438993
[1187]	train-rmse:0.43899
[1188]	train-rmse:0.438987
[1189]	train-rmse:0.438983
[1190]	train-rmse:0.438975
[1191]	train-rmse:0.438972
[1192]	train-rmse:0.438967
[1193]	train-rmse:0.438965
[1194]	train-rmse:0.438962
[1195]	train-rmse:0.438958
[1196]	train-rmse:0.438955
[1197]	train-rmse:0.438949
[1198]	train-rmse:0.438944
[1199]	train-rmse:0.43894
[1200]	train-rmse:0.438935
[1201]	train-rmse:0.438932
[1202]	train-rmse:0.438929
[1203]	train-rmse:0.438924
[1204]	train-rmse:0.438918
[1205]	train-rmse:0.438914
[1206]	train-rmse:0.438907
[1207]	train-rmse:0.438902
[1208]	train-rmse:0.438898
[1209]	train-rmse:0.438896
[1210]	train-rmse:0.438891
[1211]	train-rmse:0.438887
[1212]	train-rmse:0.438881
[1213]	train-rmse:0.438874
[1214]	train-rmse:0.438869
[1215]	train-rmse:0.438864
[1216]	train-rmse:0.43886
[1217]	train-rmse:0.438855
[1218]	train-rmse:0.438851
[1219]	train-rmse:0.438848
[1220]	train-rmse:0.438843
[1221]	train-rmse:0.438838
[1222]	train-rmse:0.438834
[1223]	train-rmse:0.438829
[1224]	train-rmse:0.438823
[1225]	train-rmse:0.438819
[1226]	train-rmse:0.438815
[1227]	train-rmse:0.438809
[1228]	train-rmse:0.438806
[1229]	train-rmse:0.438801
[1230]	train-rmse:0.438797
[1231]	train-rmse:0.438793
[1232]	train-rmse:0.438789
[1233]	train-rmse:0.438786
[1234]	train-rmse:0.43878
[1235]	train-rmse:0.438775
[1236]	train-rmse:0.438771
[1237]	train-rmse:0.438767
[1238]	train-rmse:0.438762
[1239]	train-rmse:0.438758
[1240]	train-rmse:0.438755
[1241]	train-rmse:0.43875
[1242]	train-rmse:0.438747
[1243]	train-rmse:0.438743
[1244]	train-rmse:0.43874
[1245]	train-rmse:0.438736
[1246]	train-rmse:0.438732
[1247]	train-rmse:0.438728
[1248]	train-rmse:0.438726
[1249]	train-rmse:0.438721
[1250]	train-rmse:0.438718
[1251]	train-rmse:0.438714
[1252]	train-rmse:0.438711
[1253]	train-rmse:0.438708
[1254]	train-rmse:0.438705
[1255]	train-rmse:0.438701
[1256]	train-rmse:0.438695
[1257]	train-rmse:0.438691
[1258]	train-rmse:0.438684
[1259]	train-rmse:0.438679
[1260]	train-rmse:0.438672
[1261]	train-rmse:0.438668
[1262]	train-rmse:0.438665
[1263]	train-rmse:0.438662
[1264]	train-rmse:0.438657
[1265]	train-rmse:0.43865
[1266]	train-rmse:0.438646
[1267]	train-rmse:0.438643
[1268]	train-rmse:0.438638
[1269]	train-rmse:0.438636
[1270]	train-rmse:0.438632
[1271]	train-rmse:0.438628
[1272]	train-rmse:0.438623
[1273]	train-rmse:0.438618
[1274]	train-rmse:0.438615
[1275]	train-rmse:0.438608
[1276]	train-rmse:0.438606
[1277]	train-rmse:0.438603
[1278]	train-rmse:0.438598
[1279]	train-rmse:0.438596
[1280]	train-rmse:0.438592
[1281]	train-rmse:0.438589
[1282]	train-rmse:0.438586
[1283]	train-rmse:0.43858
[1284]	train-rmse:0.438578
[1285]	train-rmse:0.438574
[1286]	train-rmse:0.438573
[1287]	train-rmse:0.438569
[1288]	train-rmse:0.438566
[1289]	train-rmse:0.438561
[1290]	train-rmse:0.438558
[1291]	train-rmse:0.438555
[1292]	train-rmse:0.438551
[1293]	train-rmse:0.438548
[1294]	train-rmse:0.438545
[1295]	train-rmse:0.438541
[1296]	train-rmse:0.438536
[1297]	train-rmse:0.438531
[1298]	train-rmse:0.438529
[1299]	train-rmse:0.438527
[1300]	train-rmse:0.438521
[1301]	train-rmse:0.438519
[1302]	train-rmse:0.438516
[1303]	train-rmse:0.438511
[1304]	train-rmse:0.438505
[1305]	train-rmse:0.438501
[1306]	train-rmse:0.438496
[1307]	train-rmse:0.438491
[1308]	train-rmse:0.438485
[1309]	train-rmse:0.438482
[1310]	train-rmse:0.438478
[1311]	train-rmse:0.438474
[1312]	train-rmse:0.438471
[1313]	train-rmse:0.438466
[1314]	train-rmse:0.438462
[1315]	train-rmse:0.438458
[1316]	train-rmse:0.438454
[1317]	train-rmse:0.43845
[1318]	train-rmse:0.438446
[1319]	train-rmse:0.438444
[1320]	train-rmse:0.438442
[1321]	train-rmse:0.438438
[1322]	train-rmse:0.438435
[1323]	train-rmse:0.438431
[1324]	train-rmse:0.438428
[1325]	train-rmse:0.438423
[1326]	train-rmse:0.43842
[1327]	train-rmse:0.438416
[1328]	train-rmse:0.438413
[1329]	train-rmse:0.43841
[1330]	train-rmse:0.438407
[1331]	train-rmse:0.438403
[1332]	train-rmse:0.438399
[1333]	train-rmse:0.438397
[1334]	train-rmse:0.438393
[1335]	train-rmse:0.43839
[1336]	train-rmse:0.438388
[1337]	train-rmse:0.438386
[1338]	train-rmse:0.438382
[1339]	train-rmse:0.438378
[1340]	train-rmse:0.438375
[1341]	train-rmse:0.438373
[1342]	train-rmse:0.438368
[1343]	train-rmse:0.438363
[1344]	train-rmse:0.438359
[1345]	train-rmse:0.438356
[1346]	train-rmse:0.438354
[1347]	train-rmse:0.43835
[1348]	train-rmse:0.438345
[1349]	train-rmse:0.438338
[1350]	train-rmse:0.438334
[1351]	train-rmse:0.438331
[1352]	train-rmse:0.438325
[1353]	train-rmse:0.438324
[1354]	train-rmse:0.438319
[1355]	train-rmse:0.438316
[1356]	train-rmse:0.438311
[1357]	train-rmse:0.438309
[1358]	train-rmse:0.438307
[1359]	train-rmse:0.438302
[1360]	train-rmse:0.438299
[1361]	train-rmse:0.438298
[1362]	train-rmse:0.438295
[1363]	train-rmse:0.438292
[1364]	train-rmse:0.438287
[1365]	train-rmse:0.438285
[1366]	train-rmse:0.438281
[1367]	train-rmse:0.438276
[1368]	train-rmse:0.438273
[1369]	train-rmse:0.43827
[1370]	train-rmse:0.438267
[1371]	train-rmse:0.438265
[1372]	train-rmse:0.43826
[1373]	train-rmse:0.438255
[1374]	train-rmse:0.438253
[1375]	train-rmse:0.438249
[1376]	train-rmse:0.438245
[1377]	train-rmse:0.438242
[1378]	train-rmse:0.438237
[1379]	train-rmse:0.438233
[1380]	train-rmse:0.438227
[1381]	train-rmse:0.438225
[1382]	train-rmse:0.438218
[1383]	train-rmse:0.438215
[1384]	train-rmse:0.438213
[1385]	train-rmse:0.438211
[1386]	train-rmse:0.438208
[1387]	train-rmse:0.438205
[1388]	train-rmse:0.4382
[1389]	train-rmse:0.438198
[1390]	train-rmse:0.438195
[1391]	train-rmse:0.438192
[1392]	train-rmse:0.438187
[1393]	train-rmse:0.438182
[1394]	train-rmse:0.438178
[1395]	train-rmse:0.438174
[1396]	train-rmse:0.438172
[1397]	train-rmse:0.438165
[1398]	train-rmse:0.438162
[1399]	train-rmse:0.438157
[1400]	train-rmse:0.438155
[1401]	train-rmse:0.438153
[1402]	train-rmse:0.43815
[1403]	train-rmse:0.438146
[1404]	train-rmse:0.438142
[1405]	train-rmse:0.43814
[1406]	train-rmse:0.438138
[1407]	train-rmse:0.438135
[1408]	train-rmse:0.43813
[1409]	train-rmse:0.438127
[1410]	train-rmse:0.438123
[1411]	train-rmse:0.43812
[1412]	train-rmse:0.438116
[1413]	train-rmse:0.43811
[1414]	train-rmse:0.438107
[1415]	train-rmse:0.438104
[1416]	train-rmse:0.4381
[1417]	train-rmse:0.438096
[1418]	train-rmse:0.43809
[1419]	train-rmse:0.438086
[1420]	train-rmse:0.438084
[1421]	train-rmse:0.43808
[1422]	train-rmse:0.438075
[1423]	train-rmse:0.438072
[1424]	train-rmse:0.438068
[1425]	train-rmse:0.438064
[1426]	train-rmse:0.43806
[1427]	train-rmse:0.438056
[1428]	train-rmse:0.438053
[1429]	train-rmse:0.43805
[1430]	train-rmse:0.438048
[1431]	train-rmse:0.438044
[1432]	train-rmse:0.43804
[1433]	train-rmse:0.438036
[1434]	train-rmse:0.438033
[1435]	train-rmse:0.438029
[1436]	train-rmse:0.438026
[1437]	train-rmse:0.438024
[1438]	train-rmse:0.438021
[1439]	train-rmse:0.438017
[1440]	train-rmse:0.438015
[1441]	train-rmse:0.438012
[1442]	train-rmse:0.438009
[1443]	train-rmse:0.438006
[1444]	train-rmse:0.438002
[1445]	train-rmse:0.437999
[1446]	train-rmse:0.437996
[1447]	train-rmse:0.437994
[1448]	train-rmse:0.437989
[1449]	train-rmse:0.437985
[1450]	train-rmse:0.437982
[1451]	train-rmse:0.43798
[1452]	train-rmse:0.437977
[1453]	train-rmse:0.437976
[1454]	train-rmse:0.437972
[1455]	train-rmse:0.437968
[1456]	train-rmse:0.437963
[1457]	train-rmse:0.43796
[1458]	train-rmse:0.437957
[1459]	train-rmse:0.437952
[1460]	train-rmse:0.43795
[1461]	train-rmse:0.437947
[1462]	train-rmse:0.437944
[1463]	train-rmse:0.437942
[1464]	train-rmse:0.43794
[1465]	train-rmse:0.437936
[1466]	train-rmse:0.437933
[1467]	train-rmse:0.43793
[1468]	train-rmse:0.437926
[1469]	train-rmse:0.437924
[1470]	train-rmse:0.437918
[1471]	train-rmse:0.437914
[1472]	train-rmse:0.43791
[1473]	train-rmse:0.437907
[1474]	train-rmse:0.437905
[1475]	train-rmse:0.437899
[1476]	train-rmse:0.437895
[1477]	train-rmse:0.437894
[1478]	train-rmse:0.437891
[1479]	train-rmse:0.43789
[1480]	train-rmse:0.437885
[1481]	train-rmse:0.437882
[1482]	train-rmse:0.437878
[1483]	train-rmse:0.437875
[1484]	train-rmse:0.43787
[1485]	train-rmse:0.437866
[1486]	train-rmse:0.437863
[1487]	train-rmse:0.43786
[1488]	train-rmse:0.437858
[1489]	train-rmse:0.437853
[1490]	train-rmse:0.437849
[1491]	train-rmse:0.437843
[1492]	train-rmse:0.43784
[1493]	train-rmse:0.437837
[1494]	train-rmse:0.437835
[1495]	train-rmse:0.437831
[1496]	train-rmse:0.437828
[1497]	train-rmse:0.437825
[1498]	train-rmse:0.437822
[1499]	train-rmse:0.437818
[1500]	train-rmse:0.437813
[1501]	train-rmse:0.43781
[1502]	train-rmse:0.437806
[1503]	train-rmse:0.437802
[1504]	train-rmse:0.437799
[1505]	train-rmse:0.437795
[1506]	train-rmse:0.437792
[1507]	train-rmse:0.437788
[1508]	train-rmse:0.437784
[1509]	train-rmse:0.437782
[1510]	train-rmse:0.437777
[1511]	train-rmse:0.437772
[1512]	train-rmse:0.437768
[1513]	train-rmse:0.437766
[1514]	train-rmse:0.437762
[1515]	train-rmse:0.437759
[1516]	train-rmse:0.437755
[1517]	train-rmse:0.437754
[1518]	train-rmse:0.43775
[1519]	train-rmse:0.437749
[1520]	train-rmse:0.437746
[1521]	train-rmse:0.437744
[1522]	train-rmse:0.43774
[1523]	train-rmse:0.437737
[1524]	train-rmse:0.437733
[1525]	train-rmse:0.437731
[1526]	train-rmse:0.437727
[1527]	train-rmse:0.437724
[1528]	train-rmse:0.437721
[1529]	train-rmse:0.437718
[1530]	train-rmse:0.437714
[1531]	train-rmse:0.437711
[1532]	train-rmse:0.437708
[1533]	train-rmse:0.437705
[1534]	train-rmse:0.437703
[1535]	train-rmse:0.437699
[1536]	train-rmse:0.437697
[1537]	train-rmse:0.437693
[1538]	train-rmse:0.437689
[1539]	train-rmse:0.437687
[1540]	train-rmse:0.437682
[1541]	train-rmse:0.437679
[1542]	train-rmse:0.437676
[1543]	train-rmse:0.437674
[1544]	train-rmse:0.437669
[1545]	train-rmse:0.437663
[1546]	train-rmse:0.437659
[1547]	train-rmse:0.437657
[1548]	train-rmse:0.437655
[1549]	train-rmse:0.43765
[1550]	train-rmse:0.437646
[1551]	train-rmse:0.437643
[1552]	train-rmse:0.437639
[1553]	train-rmse:0.437636
[1554]	train-rmse:0.437633
[1555]	train-rmse:0.43763
[1556]	train-rmse:0.437628
[1557]	train-rmse:0.437624
[1558]	train-rmse:0.437622
[1559]	train-rmse:0.437618
[1560]	train-rmse:0.437614
[1561]	train-rmse:0.437609
[1562]	train-rmse:0.437603
[1563]	train-rmse:0.4376
[1564]	train-rmse:0.437596
[1565]	train-rmse:0.437592
[1566]	train-rmse:0.437589
[1567]	train-rmse:0.437585
[1568]	train-rmse:0.437583
[1569]	train-rmse:0.437581
[1570]	train-rmse:0.437578
[1571]	train-rmse:0.437574
[1572]	train-rmse:0.437571
[1573]	train-rmse:0.437567
[1574]	train-rmse:0.437564
[1575]	train-rmse:0.43756
[1576]	train-rmse:0.437556
[1577]	train-rmse:0.437554
[1578]	train-rmse:0.43755
[1579]	train-rmse:0.437547
[1580]	train-rmse:0.437545
[1581]	train-rmse:0.437542
[1582]	train-rmse:0.437539
[1583]	train-rmse:0.437536
[1584]	train-rmse:0.437532
[1585]	train-rmse:0.43753
[1586]	train-rmse:0.437527
[1587]	train-rmse:0.437522
[1588]	train-rmse:0.43752
[1589]	train-rmse:0.437518
[1590]	train-rmse:0.437516
[1591]	train-rmse:0.437512
[1592]	train-rmse:0.437509
[1593]	train-rmse:0.437506
[1594]	train-rmse:0.437503
[1595]	train-rmse:0.437498
[1596]	train-rmse:0.437495
[1597]	train-rmse:0.43749
[1598]	train-rmse:0.437488
[1599]	train-rmse:0.437483

In [34]:
bst.save_model('bst_1600_eta0015.model')
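
For later prediction runs the saved booster can be reloaded without retraining; a minimal sketch (not a cell from the original notebook), assuming a dtest DMatrix built from the same predictor columns:

# reload the booster saved above and reuse it for prediction
bst = xgb.Booster()
bst.load_model('bst_1600_eta0015.model')
# preds = bst.predict(dtest)  # dtest: xgb.DMatrix over the same predictor columns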

begin cv



In [10]:
gc.collect()


Out[10]:
16

In [32]:
# right now the best result comes from this parameter set with 700 rounds
param_10 = {'booster':'gbtree',
         'nthread': 8,
         'max_depth':8, 
         'eta':0.015,
         'silent':1,
         'subsample':0.5, 
         'objective':'reg:linear',
         'eval_metric':'rmse',
         'colsample_bytree':0.5}
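
For reference, a minimal sketch (not a cell from the original notebook) of how this parameter set would be used to train the ~700-round model the comment above refers to, assuming dtrain_10 has already been built as an xgb.DMatrix:

num_round = 700  # round count referenced in the comment above
bst = xgb.train(param_10, dtrain_10, num_round)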

In [149]:
num_round = 400

cvresult = xgb.cv(param_10, dtrain_10, num_round, nfold=4,show_stdv=False,
                        seed = 0, early_stopping_rounds=5,verbose_eval = 1)
print(cvresult.tail())


[0]	train-rmse:1.34436	test-rmse:1.34436
[1]	train-rmse:1.32079	test-rmse:1.32079
[2]	train-rmse:1.29786	test-rmse:1.29787
[3]	train-rmse:1.27532	test-rmse:1.27533
[4]	train-rmse:1.2535	test-rmse:1.2535
[5]	train-rmse:1.2321	test-rmse:1.23211
[6]	train-rmse:1.21113	test-rmse:1.21114
[7]	train-rmse:1.19076	test-rmse:1.19077
[8]	train-rmse:1.17074	test-rmse:1.17076
[9]	train-rmse:1.15127	test-rmse:1.15128
[10]	train-rmse:1.13219	test-rmse:1.13221
[11]	train-rmse:1.11348	test-rmse:1.1135
[12]	train-rmse:1.09533	test-rmse:1.09535
[13]	train-rmse:1.07753	test-rmse:1.07755
[14]	train-rmse:1.06016	test-rmse:1.06018
[15]	train-rmse:1.04318	test-rmse:1.04321
[16]	train-rmse:1.02652	test-rmse:1.02655
[17]	train-rmse:1.01034	test-rmse:1.01037
[18]	train-rmse:0.994443	test-rmse:0.994482
[19]	train-rmse:0.978944	test-rmse:0.978986
[20]	train-rmse:0.963866	test-rmse:0.96391
[21]	train-rmse:0.949097	test-rmse:0.949142
[22]	train-rmse:0.934742	test-rmse:0.93479
[23]	train-rmse:0.920769	test-rmse:0.920819
[24]	train-rmse:0.907041	test-rmse:0.907093
[25]	train-rmse:0.893764	test-rmse:0.89382
[26]	train-rmse:0.880777	test-rmse:0.880837
[27]	train-rmse:0.868099	test-rmse:0.868161
[28]	train-rmse:0.855725	test-rmse:0.855788
[29]	train-rmse:0.843641	test-rmse:0.843709
[30]	train-rmse:0.831897	test-rmse:0.831968
[31]	train-rmse:0.820507	test-rmse:0.820581
[32]	train-rmse:0.809323	test-rmse:0.8094
[33]	train-rmse:0.798422	test-rmse:0.798503
[34]	train-rmse:0.78787	test-rmse:0.787955
[35]	train-rmse:0.77759	test-rmse:0.777678
[36]	train-rmse:0.767551	test-rmse:0.767641
[37]	train-rmse:0.757767	test-rmse:0.757861
[38]	train-rmse:0.748245	test-rmse:0.748342
[39]	train-rmse:0.739	test-rmse:0.7391
[40]	train-rmse:0.730001	test-rmse:0.730103
[41]	train-rmse:0.721251	test-rmse:0.721357
[42]	train-rmse:0.712736	test-rmse:0.712846
[43]	train-rmse:0.704461	test-rmse:0.704573
[44]	train-rmse:0.696408	test-rmse:0.696525
[45]	train-rmse:0.688587	test-rmse:0.688709
[46]	train-rmse:0.680961	test-rmse:0.681087
[47]	train-rmse:0.673592	test-rmse:0.673725
[48]	train-rmse:0.666437	test-rmse:0.666573
[49]	train-rmse:0.659455	test-rmse:0.659595
[50]	train-rmse:0.652681	test-rmse:0.652825
[51]	train-rmse:0.646116	test-rmse:0.646264
[52]	train-rmse:0.639772	test-rmse:0.639924
[53]	train-rmse:0.633617	test-rmse:0.633774
[54]	train-rmse:0.627581	test-rmse:0.627743
[55]	train-rmse:0.621739	test-rmse:0.621905
[56]	train-rmse:0.616073	test-rmse:0.616243
[57]	train-rmse:0.610583	test-rmse:0.610758
[58]	train-rmse:0.60528	test-rmse:0.605459
[59]	train-rmse:0.600101	test-rmse:0.600285
[60]	train-rmse:0.595091	test-rmse:0.595279
[61]	train-rmse:0.590267	test-rmse:0.590459
[62]	train-rmse:0.585574	test-rmse:0.58577
[63]	train-rmse:0.581025	test-rmse:0.581226
[64]	train-rmse:0.576624	test-rmse:0.576829
[65]	train-rmse:0.572336	test-rmse:0.572546
[66]	train-rmse:0.56821	test-rmse:0.568424
[67]	train-rmse:0.564226	test-rmse:0.564445
[68]	train-rmse:0.560347	test-rmse:0.560571
[69]	train-rmse:0.556572	test-rmse:0.556801
[70]	train-rmse:0.552949	test-rmse:0.553181
[71]	train-rmse:0.549416	test-rmse:0.549652
[72]	train-rmse:0.546032	test-rmse:0.546273
[73]	train-rmse:0.542755	test-rmse:0.543
[74]	train-rmse:0.539564	test-rmse:0.539814
[75]	train-rmse:0.536489	test-rmse:0.536743
[76]	train-rmse:0.533491	test-rmse:0.53375
[77]	train-rmse:0.530632	test-rmse:0.530895
[78]	train-rmse:0.527849	test-rmse:0.528118
[79]	train-rmse:0.525165	test-rmse:0.525437
[80]	train-rmse:0.522567	test-rmse:0.522843
[81]	train-rmse:0.520058	test-rmse:0.520339
[82]	train-rmse:0.517619	test-rmse:0.517904
[83]	train-rmse:0.515277	test-rmse:0.515566
[84]	train-rmse:0.512995	test-rmse:0.513288
[85]	train-rmse:0.510804	test-rmse:0.511102
[86]	train-rmse:0.508688	test-rmse:0.50899
[87]	train-rmse:0.506632	test-rmse:0.506939
[88]	train-rmse:0.504673	test-rmse:0.504984
[89]	train-rmse:0.502746	test-rmse:0.503062
[90]	train-rmse:0.500905	test-rmse:0.501224
[91]	train-rmse:0.499121	test-rmse:0.499444
[92]	train-rmse:0.497401	test-rmse:0.497729
[93]	train-rmse:0.495756	test-rmse:0.496088
[94]	train-rmse:0.494146	test-rmse:0.494483
[95]	train-rmse:0.492606	test-rmse:0.492948
[96]	train-rmse:0.491114	test-rmse:0.491459
[97]	train-rmse:0.489682	test-rmse:0.490031
[98]	train-rmse:0.488292	test-rmse:0.488646
[99]	train-rmse:0.486951	test-rmse:0.487309
[100]	train-rmse:0.485653	test-rmse:0.486015
[101]	train-rmse:0.484405	test-rmse:0.484772
[102]	train-rmse:0.483198	test-rmse:0.483569
[103]	train-rmse:0.482039	test-rmse:0.482414
[104]	train-rmse:0.480915	test-rmse:0.481294
[105]	train-rmse:0.479839	test-rmse:0.480223
[106]	train-rmse:0.478792	test-rmse:0.479179
[107]	train-rmse:0.47777	test-rmse:0.478161
[108]	train-rmse:0.476801	test-rmse:0.477196
[109]	train-rmse:0.475857	test-rmse:0.476256
[110]	train-rmse:0.474952	test-rmse:0.475355
[111]	train-rmse:0.474073	test-rmse:0.474481
[112]	train-rmse:0.47323	test-rmse:0.473642
[113]	train-rmse:0.472411	test-rmse:0.472826
[114]	train-rmse:0.47163	test-rmse:0.472049
[115]	train-rmse:0.470869	test-rmse:0.471291
[116]	train-rmse:0.470125	test-rmse:0.47055
[117]	train-rmse:0.469417	test-rmse:0.469846
[118]	train-rmse:0.468736	test-rmse:0.46917
[119]	train-rmse:0.468081	test-rmse:0.468519
[120]	train-rmse:0.467452	test-rmse:0.467894
[121]	train-rmse:0.466839	test-rmse:0.467286
[122]	train-rmse:0.466246	test-rmse:0.466696
[123]	train-rmse:0.465673	test-rmse:0.466127
[124]	train-rmse:0.465118	test-rmse:0.465575
[125]	train-rmse:0.464587	test-rmse:0.465047
[126]	train-rmse:0.464075	test-rmse:0.464539
[127]	train-rmse:0.463573	test-rmse:0.464041
[128]	train-rmse:0.463095	test-rmse:0.463568
[129]	train-rmse:0.462628	test-rmse:0.463105
[130]	train-rmse:0.462184	test-rmse:0.462665
[131]	train-rmse:0.461759	test-rmse:0.462242
[132]	train-rmse:0.461337	test-rmse:0.461824
[133]	train-rmse:0.460923	test-rmse:0.461414
[134]	train-rmse:0.460527	test-rmse:0.461021
[135]	train-rmse:0.460148	test-rmse:0.460645
[136]	train-rmse:0.45978	test-rmse:0.460282
[137]	train-rmse:0.459432	test-rmse:0.459937
[138]	train-rmse:0.459085	test-rmse:0.459594
[139]	train-rmse:0.458755	test-rmse:0.459267
[140]	train-rmse:0.458437	test-rmse:0.458953
[141]	train-rmse:0.458128	test-rmse:0.458647
[142]	train-rmse:0.457826	test-rmse:0.45835
[143]	train-rmse:0.457542	test-rmse:0.458069
[144]	train-rmse:0.457261	test-rmse:0.457792
[145]	train-rmse:0.456994	test-rmse:0.457528
[146]	train-rmse:0.456734	test-rmse:0.457272
[147]	train-rmse:0.456487	test-rmse:0.457028
[148]	train-rmse:0.456234	test-rmse:0.45678
[149]	train-rmse:0.455999	test-rmse:0.456548
[150]	train-rmse:0.455782	test-rmse:0.456333
[151]	train-rmse:0.455556	test-rmse:0.45611
[152]	train-rmse:0.455341	test-rmse:0.455899
[153]	train-rmse:0.45513	test-rmse:0.455692
[154]	train-rmse:0.454923	test-rmse:0.455489
[155]	train-rmse:0.454721	test-rmse:0.45529
[156]	train-rmse:0.454531	test-rmse:0.455105
[157]	train-rmse:0.454346	test-rmse:0.454924
[158]	train-rmse:0.454168	test-rmse:0.454748
[159]	train-rmse:0.45399	test-rmse:0.454574
[160]	train-rmse:0.45381	test-rmse:0.454398
[161]	train-rmse:0.45363	test-rmse:0.454221
[162]	train-rmse:0.453471	test-rmse:0.454065
[163]	train-rmse:0.453321	test-rmse:0.453919
[164]	train-rmse:0.453165	test-rmse:0.453766
[165]	train-rmse:0.453015	test-rmse:0.453619
[166]	train-rmse:0.452871	test-rmse:0.453479
[167]	train-rmse:0.452729	test-rmse:0.45334
[168]	train-rmse:0.452588	test-rmse:0.453203
[169]	train-rmse:0.452458	test-rmse:0.453077
[170]	train-rmse:0.452321	test-rmse:0.452943
[171]	train-rmse:0.452197	test-rmse:0.452821
[172]	train-rmse:0.45207	test-rmse:0.452698
[173]	train-rmse:0.451951	test-rmse:0.452582
[174]	train-rmse:0.451837	test-rmse:0.452471
[175]	train-rmse:0.451724	test-rmse:0.452362
[176]	train-rmse:0.451611	test-rmse:0.452253
[177]	train-rmse:0.451499	test-rmse:0.452144
[178]	train-rmse:0.451397	test-rmse:0.452044
[179]	train-rmse:0.45129	test-rmse:0.451941
[180]	train-rmse:0.451187	test-rmse:0.451841
[181]	train-rmse:0.451092	test-rmse:0.451749
[182]	train-rmse:0.450995	test-rmse:0.451656
[183]	train-rmse:0.450898	test-rmse:0.451562
[184]	train-rmse:0.450806	test-rmse:0.451475
[185]	train-rmse:0.450713	test-rmse:0.451383
[186]	train-rmse:0.450614	test-rmse:0.451288
[187]	train-rmse:0.450526	test-rmse:0.451204
[188]	train-rmse:0.450438	test-rmse:0.451119
[189]	train-rmse:0.45036	test-rmse:0.451044
[190]	train-rmse:0.450272	test-rmse:0.450959
[191]	train-rmse:0.450193	test-rmse:0.450883
[192]	train-rmse:0.450122	test-rmse:0.450817
[193]	train-rmse:0.450052	test-rmse:0.45075
[194]	train-rmse:0.449976	test-rmse:0.450676
[195]	train-rmse:0.449902	test-rmse:0.450605
[196]	train-rmse:0.449833	test-rmse:0.45054
[197]	train-rmse:0.449765	test-rmse:0.450475
[198]	train-rmse:0.449706	test-rmse:0.450419
[199]	train-rmse:0.449634	test-rmse:0.45035
[200]	train-rmse:0.44957	test-rmse:0.450289
[201]	train-rmse:0.449496	test-rmse:0.450219
[202]	train-rmse:0.449424	test-rmse:0.45015
[203]	train-rmse:0.449354	test-rmse:0.450084
[204]	train-rmse:0.44929	test-rmse:0.450023
[205]	train-rmse:0.449223	test-rmse:0.449958
[206]	train-rmse:0.449164	test-rmse:0.449902
[207]	train-rmse:0.449105	test-rmse:0.449846
[208]	train-rmse:0.449044	test-rmse:0.449788
[209]	train-rmse:0.448987	test-rmse:0.449734
[210]	train-rmse:0.448924	test-rmse:0.449675
[211]	train-rmse:0.448871	test-rmse:0.449625
[212]	train-rmse:0.448817	test-rmse:0.449574
[213]	train-rmse:0.448759	test-rmse:0.449519
[214]	train-rmse:0.448707	test-rmse:0.44947
[215]	train-rmse:0.448654	test-rmse:0.44942
[216]	train-rmse:0.448602	test-rmse:0.449371
[217]	train-rmse:0.448551	test-rmse:0.449323
[218]	train-rmse:0.448505	test-rmse:0.449278
[219]	train-rmse:0.448458	test-rmse:0.449235
[220]	train-rmse:0.448411	test-rmse:0.449191
[221]	train-rmse:0.44836	test-rmse:0.449143
[222]	train-rmse:0.448317	test-rmse:0.449103
[223]	train-rmse:0.448274	test-rmse:0.449063
[224]	train-rmse:0.44823	test-rmse:0.449021
[225]	train-rmse:0.448182	test-rmse:0.448977
[226]	train-rmse:0.448133	test-rmse:0.448931
[227]	train-rmse:0.448088	test-rmse:0.448888
[228]	train-rmse:0.448042	test-rmse:0.448845
[229]	train-rmse:0.447993	test-rmse:0.448799
[230]	train-rmse:0.447947	test-rmse:0.448756
[231]	train-rmse:0.447905	test-rmse:0.448717
[232]	train-rmse:0.447856	test-rmse:0.44867
[233]	train-rmse:0.447815	test-rmse:0.448631
[234]	train-rmse:0.447771	test-rmse:0.448592
[235]	train-rmse:0.447726	test-rmse:0.448549
[236]	train-rmse:0.447687	test-rmse:0.448514
[237]	train-rmse:0.447648	test-rmse:0.448478
[238]	train-rmse:0.447603	test-rmse:0.448436
[239]	train-rmse:0.447564	test-rmse:0.4484
[240]	train-rmse:0.447529	test-rmse:0.448368
[241]	train-rmse:0.447493	test-rmse:0.448334
[242]	train-rmse:0.447453	test-rmse:0.448298
[243]	train-rmse:0.447417	test-rmse:0.448264
[244]	train-rmse:0.447374	test-rmse:0.448224
[245]	train-rmse:0.447333	test-rmse:0.448186
[246]	train-rmse:0.447295	test-rmse:0.448151
[247]	train-rmse:0.447255	test-rmse:0.448115
[248]	train-rmse:0.447222	test-rmse:0.448084
[249]	train-rmse:0.447191	test-rmse:0.448055
[250]	train-rmse:0.447156	test-rmse:0.448025
[251]	train-rmse:0.447115	test-rmse:0.447985
[252]	train-rmse:0.447076	test-rmse:0.44795
[253]	train-rmse:0.447042	test-rmse:0.447918
[254]	train-rmse:0.447	test-rmse:0.447879
[255]	train-rmse:0.44697	test-rmse:0.447852
[256]	train-rmse:0.446932	test-rmse:0.447817
[257]	train-rmse:0.446899	test-rmse:0.447787
[258]	train-rmse:0.446861	test-rmse:0.447752
[259]	train-rmse:0.446829	test-rmse:0.447724
[260]	train-rmse:0.446797	test-rmse:0.447693
[261]	train-rmse:0.446764	test-rmse:0.447662
[262]	train-rmse:0.446726	test-rmse:0.447628
[263]	train-rmse:0.446688	test-rmse:0.447592
[264]	train-rmse:0.446647	test-rmse:0.447553
[265]	train-rmse:0.44661	test-rmse:0.447519
[266]	train-rmse:0.446577	test-rmse:0.447489
[267]	train-rmse:0.446545	test-rmse:0.44746
[268]	train-rmse:0.446514	test-rmse:0.447431
[269]	train-rmse:0.446484	test-rmse:0.447404
[270]	train-rmse:0.446454	test-rmse:0.447376
[271]	train-rmse:0.446422	test-rmse:0.447346
[272]	train-rmse:0.446396	test-rmse:0.447323
[273]	train-rmse:0.446364	test-rmse:0.447294
[274]	train-rmse:0.446332	test-rmse:0.447264
[275]	train-rmse:0.446307	test-rmse:0.447242
[276]	train-rmse:0.446277	test-rmse:0.447215
[277]	train-rmse:0.446248	test-rmse:0.447188
[278]	train-rmse:0.446216	test-rmse:0.447159
[279]	train-rmse:0.446193	test-rmse:0.447138
[280]	train-rmse:0.446167	test-rmse:0.447115
[281]	train-rmse:0.44614	test-rmse:0.447091
[282]	train-rmse:0.446108	test-rmse:0.447062
[283]	train-rmse:0.446081	test-rmse:0.447038
[284]	train-rmse:0.446053	test-rmse:0.447012
[285]	train-rmse:0.446027	test-rmse:0.446989
[286]	train-rmse:0.445998	test-rmse:0.446962
[287]	train-rmse:0.445972	test-rmse:0.446939
[288]	train-rmse:0.445947	test-rmse:0.446917
[289]	train-rmse:0.445918	test-rmse:0.446891
[290]	train-rmse:0.445894	test-rmse:0.44687
[291]	train-rmse:0.445868	test-rmse:0.446847
[292]	train-rmse:0.445834	test-rmse:0.446815
[293]	train-rmse:0.44581	test-rmse:0.446794
[294]	train-rmse:0.445784	test-rmse:0.446771
[295]	train-rmse:0.445756	test-rmse:0.446746
[296]	train-rmse:0.445726	test-rmse:0.446719
[297]	train-rmse:0.445702	test-rmse:0.446697
[298]	train-rmse:0.445675	test-rmse:0.446673
[299]	train-rmse:0.445652	test-rmse:0.446653
[300]	train-rmse:0.445628	test-rmse:0.446632
[301]	train-rmse:0.445602	test-rmse:0.446609
[302]	train-rmse:0.445573	test-rmse:0.446582
[303]	train-rmse:0.445544	test-rmse:0.446557
[304]	train-rmse:0.445514	test-rmse:0.44653
[305]	train-rmse:0.445495	test-rmse:0.446513
[306]	train-rmse:0.445474	test-rmse:0.446494
[307]	train-rmse:0.44545	test-rmse:0.446473
[308]	train-rmse:0.44543	test-rmse:0.446456
[309]	train-rmse:0.445402	test-rmse:0.44643
[310]	train-rmse:0.445378	test-rmse:0.446408
[311]	train-rmse:0.445351	test-rmse:0.446384
[312]	train-rmse:0.445327	test-rmse:0.446363
[313]	train-rmse:0.445303	test-rmse:0.446342
[314]	train-rmse:0.445277	test-rmse:0.446319
[315]	train-rmse:0.445255	test-rmse:0.4463
[316]	train-rmse:0.445236	test-rmse:0.446283
[317]	train-rmse:0.445212	test-rmse:0.446263
[318]	train-rmse:0.445188	test-rmse:0.446242
[319]	train-rmse:0.445166	test-rmse:0.446223
[320]	train-rmse:0.445145	test-rmse:0.446205
[321]	train-rmse:0.445123	test-rmse:0.446186
[322]	train-rmse:0.445105	test-rmse:0.446172
[323]	train-rmse:0.445086	test-rmse:0.446155
[324]	train-rmse:0.445059	test-rmse:0.446132
[325]	train-rmse:0.445035	test-rmse:0.44611
[326]	train-rmse:0.445018	test-rmse:0.446095
[327]	train-rmse:0.444994	test-rmse:0.446074
[328]	train-rmse:0.444974	test-rmse:0.446056
[329]	train-rmse:0.444953	test-rmse:0.446037
[330]	train-rmse:0.444928	test-rmse:0.446016
[331]	train-rmse:0.444907	test-rmse:0.445997
[332]	train-rmse:0.444885	test-rmse:0.445977
[333]	train-rmse:0.444869	test-rmse:0.445964
[334]	train-rmse:0.444852	test-rmse:0.445952
[335]	train-rmse:0.444824	test-rmse:0.445926
[336]	train-rmse:0.444798	test-rmse:0.445903
[337]	train-rmse:0.444777	test-rmse:0.445885
[338]	train-rmse:0.444759	test-rmse:0.44587
[339]	train-rmse:0.444734	test-rmse:0.445847
[340]	train-rmse:0.444708	test-rmse:0.445825
[341]	train-rmse:0.444689	test-rmse:0.445808
[342]	train-rmse:0.444665	test-rmse:0.445787
[343]	train-rmse:0.444642	test-rmse:0.445767
[344]	train-rmse:0.444624	test-rmse:0.445752
[345]	train-rmse:0.444608	test-rmse:0.445738
[346]	train-rmse:0.444586	test-rmse:0.44572
[347]	train-rmse:0.444567	test-rmse:0.445703
[348]	train-rmse:0.444546	test-rmse:0.445685
[349]	train-rmse:0.44452	test-rmse:0.445662
[350]	train-rmse:0.444499	test-rmse:0.445644
[351]	train-rmse:0.444479	test-rmse:0.445626
[352]	train-rmse:0.444456	test-rmse:0.445605
[353]	train-rmse:0.444435	test-rmse:0.445587
[354]	train-rmse:0.444412	test-rmse:0.445567
[355]	train-rmse:0.444397	test-rmse:0.445554
[356]	train-rmse:0.444378	test-rmse:0.445539
[357]	train-rmse:0.444358	test-rmse:0.445521
[358]	train-rmse:0.444342	test-rmse:0.445508
[359]	train-rmse:0.444323	test-rmse:0.445492
[360]	train-rmse:0.444303	test-rmse:0.445474
[361]	train-rmse:0.444284	test-rmse:0.445458
[362]	train-rmse:0.444265	test-rmse:0.445442
[363]	train-rmse:0.444242	test-rmse:0.445421
[364]	train-rmse:0.44423	test-rmse:0.445412
[365]	train-rmse:0.444209	test-rmse:0.445394
[366]	train-rmse:0.444193	test-rmse:0.445381
[367]	train-rmse:0.444177	test-rmse:0.445367
[368]	train-rmse:0.444155	test-rmse:0.445348
[369]	train-rmse:0.444137	test-rmse:0.445333
[370]	train-rmse:0.444118	test-rmse:0.445316
[371]	train-rmse:0.444102	test-rmse:0.445303
[372]	train-rmse:0.444086	test-rmse:0.445289
[373]	train-rmse:0.444066	test-rmse:0.445272
[374]	train-rmse:0.444046	test-rmse:0.445255
[375]	train-rmse:0.444027	test-rmse:0.445239
[376]	train-rmse:0.44401	test-rmse:0.445224
[377]	train-rmse:0.443989	test-rmse:0.445207
[378]	train-rmse:0.44397	test-rmse:0.44519
[379]	train-rmse:0.443954	test-rmse:0.445177
[380]	train-rmse:0.443932	test-rmse:0.445157
[381]	train-rmse:0.443913	test-rmse:0.445141
[382]	train-rmse:0.443891	test-rmse:0.445122
[383]	train-rmse:0.443876	test-rmse:0.44511
[384]	train-rmse:0.44386	test-rmse:0.445098
[385]	train-rmse:0.443845	test-rmse:0.445084
[386]	train-rmse:0.443827	test-rmse:0.445071
[387]	train-rmse:0.443812	test-rmse:0.445058
[388]	train-rmse:0.443796	test-rmse:0.445044
[389]	train-rmse:0.443783	test-rmse:0.445035
[390]	train-rmse:0.443768	test-rmse:0.445022
[391]	train-rmse:0.443748	test-rmse:0.445005
[392]	train-rmse:0.443727	test-rmse:0.444988
[393]	train-rmse:0.443713	test-rmse:0.444976
[394]	train-rmse:0.4437	test-rmse:0.444966
[395]	train-rmse:0.443683	test-rmse:0.444952
[396]	train-rmse:0.44367	test-rmse:0.444942
[397]	train-rmse:0.443654	test-rmse:0.444929
[398]	train-rmse:0.443639	test-rmse:0.444916
[399]	train-rmse:0.443624	test-rmse:0.444904
     test-rmse-mean  test-rmse-std  train-rmse-mean  train-rmse-std
395        0.444952       0.000282         0.443683        0.000118
396        0.444942       0.000283         0.443670        0.000118
397        0.444929       0.000287         0.443654        0.000114
398        0.444916       0.000289         0.443639        0.000112
399        0.444904       0.000287         0.443624        0.000115
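
Because xgb.cv was called with early_stopping_rounds, the returned frame is truncated at the best iteration, so the best round and its test RMSE can be read directly off the tail; a minimal sketch:

best_round = len(cvresult) - 1                        # last (= best) boosting round kept by early stopping
best_test_rmse = cvresult['test-rmse-mean'].iloc[-1]
print(best_round, best_test_rmse)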

In [83]:
cvresult[['test-rmse-mean','train-rmse-mean']].plot()


Out[83]:
<matplotlib.axes.AxesSubplot at 0x7fa7b4f11750>

In [79]:
curve_all = cvresult[['test-rmse-mean','train-rmse-mean']].copy()

Cross-validation summary (best round: test RMSE for each setting); a sketch of the kind of sweep that produces this comparison follows:

  • round 287: 0.452760, max_depth: 5, nthread = 1
  • round 771: 0.445958, max_depth: 5, nthread = 12
  • round 429: 0.445676, max_depth: 6
  • round 348: 0.445456, max_depth: 7
  • round 348: 0.445216, max_depth: 8
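
A minimal sketch (hypothetical, not from the original notebook) of the kind of max_depth sweep that yields a comparison like the one above, reusing the param_10 dict and dtrain_10 DMatrix defined earlier:

# sweep max_depth; each run reports its best round and the corresponding test RMSE
for depth in [5, 6, 7, 8]:
    params = dict(param_10, max_depth=depth)
    cv = xgb.cv(params, dtrain_10, num_boost_round=1000, nfold=4,
                seed=0, early_stopping_rounds=5, verbose_eval=False)
    print(depth, len(cv) - 1, cv['test-rmse-mean'].iloc[-1])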

begin combining predictions from the all-train model (weeks 10 and 11) into one submission



In [24]:
submission_11 = pd.read_csv('submission_11_new.csv',index_col = 0)

In [25]:
submission_11.reset_index(inplace = True)
submission_11.head()


Out[25]:
id predict
0 1547831 4.406201
1 6825659 3.053817
2 5853787 2.684612
3 2316053 1.259826
4 900676 2.301486

In [26]:
submission_all_train = pd.concat([submission_10_all_train,submission_11],axis =0)
submission_all_train['predict'] = submission_all_train['predict'].apply(np.expm1)
submission_all_train.rename(columns = {'predict':'Demanda_uni_equil'},inplace = True)
submission_all_train['Demanda_uni_equil'] = submission_all_train['Demanda_uni_equil'].round(1)

In [27]:
submission_all_train.loc[submission_all_train['Demanda_uni_equil']<0,'Demanda_uni_equil'] = 0

In [28]:
submission_all_train['Demanda_uni_equil'].describe()


Out[28]:
count    6.999251e+06
mean     6.120201e+00
std      1.580884e+01
min      0.000000e+00
25%      1.900000e+00
50%      3.100000e+00
75%      5.700000e+00
max      2.598100e+03
Name: Demanda_uni_equil, dtype: float64

In [29]:
submission_all_train['id'] = submission_all_train['id'].astype(int)

In [30]:
submission_all_train.head()


Out[30]:
Semana id Demanda_uni_equil
0 1569352 8.4
1 6667200 33.8
2 1592616 18.4
3 3909690 62.3
4 3659672 34.5

In [31]:
submission_all_train.to_csv('submission_all_train_15.csv',index = False)
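
The post-processing above (invert the log1p transform on the predictions, rename, round, clip negatives, cast ids) can be collected in one small helper; a sketch under the assumption that 'predict' holds log1p-scale predictions, which the use of np.expm1 implies:

def finalize_submission(df, pred_col='predict'):
    # hypothetical helper mirroring the steps above
    out = df.rename(columns={pred_col: 'Demanda_uni_equil'}).copy()
    out['Demanda_uni_equil'] = np.expm1(out['Demanda_uni_equil']).round(1).clip(lower=0)
    out['id'] = out['id'].astype(int)
    return out

# finalize_submission(pd.concat([submission_10_all_train, submission_11])).to_csv(
#     'submission_all_train_15.csv', index=False)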

In [136]:
%ls


1.5_create_lag.ipynb              pivot_test.pickle
1_predata.ipynb                   pivot_train_with_nan.pickle
1_predata_whole.ipynb             preprocessed_products.csv
3_xgb_43fea.ipynb                 RF_model/
3_xgb.ipynb                       ruta_for_cliente_producto.csv
3_xgb_prediction.ipynb            stack_sub/
3_xgb_test.ipynb                  submission_10_new.csv
4_keras_nn.ipynb                  submission_11_new.csv
5_random_forest.ipynb             submission_44fea.csv
6_stack_model.ipynb               submission_all_train_2.csv
7_SGD_regressor.ipynb             submission_all_train_3.csv
8_svm_linearSVR.ipynb             submission_all_train.csv
agencia_for_cliente_producto.csv  submission_nn.csv
bst_1000.model                    submission_nn_xgb
bst_1200_eta005.model             train_pivot_45678_to_9_whole_zero.csv
bst_200_eta005.model              train_pivot_56789_to_10_44fea.pickle
bst_400_eta005.model              train_pivot_56789_to_10_44fea_zero.pickle
bst_551_eta02.model               train_pivot_56789_to_10_new.pickle
bst_600_eta005.model              train_pivot_6789_to_11_new.pickle
bst_use_all_train.model           train_pivot_xgb_time1_44fea.csv
canal_for_cliente_producto.csv    train_pivot_xgb_time1_44fea_zero.csv
old_submission/                   train_pivot_xgb_time1.pickle
origin/                           train_pivot_xgb_time2_38fea.csv


begin bagged training and prediction (20 models on random 2,000,000-row samples, averaged later)


In [21]:
num_round = 392

dtest_10 = xgb.DMatrix(test_dataset_10_normalize[predictors_10], missing=np.nan)
submission_10 = train_pivot_56789_to_10[['id']].copy()
i = 0

# train 20 boosters on independent 2,000,000-row samples and collect their predictions
for i in range(20):
    train_pivot_xgb_time1_sample = train_dataset_10_normalize[predictors_target_10].sample(2000000)

    train_feature_10 = train_pivot_xgb_time1_sample.drop(['target'],axis = 1)
    train_label_10 = train_pivot_xgb_time1_sample[['target']]

    dtrain_10 = xgb.DMatrix(train_feature_10,label = train_label_10,missing= np.nan)

    bst = xgb.train(param_10, dtrain_10, num_round)
    print(str(i) + 'training finished!')
    submission_10['predict_' + str(i)] = bst.predict(dtest_10)
    print(str(i) + 'predicting finished!')


print('finished')


0training finished!
0predicting finished!
1training finished!
1predicting finished!
2training finished!
2predicting finished!
3training finished!
3predicting finished!
4training finished!
4predicting finished!
5training finished!
5predicting finished!
6training finished!
6predicting finished!
7training finished!
7predicting finished!
8training finished!
8predicting finished!
9training finished!
9predicting finished!
10training finished!
10predicting finished!
11training finished!
11predicting finished!
12training finished!
12predicting finished!
13training finished!
13predicting finished!
14training finished!
14predicting finished!
15training finished!
15predicting finished!
16training finished!
16predicting finished!
17training finished!
17predicting finished!
18training finished!
18predicting finished!
19training finished!
19predicting finished!
finished

In [22]:
submission_10.to_csv('submission_10_new.csv')
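
submission_10 is written here with its 20 raw prediction columns; the averaging into a single column is only shown later for the week-11 frame, but the week-10 equivalent would presumably look like this (hypothetical sketch):

pred_cols = ['predict_' + str(i) for i in range(20)]
submission_10['predict'] = submission_10[pred_cols].mean(axis=1)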

In [26]:
# plot feature importance of the last trained booster
xgb.plot_importance(bst)


Out[26]:
<matplotlib.axes.AxesSubplot at 0x7fc793a07dd0>
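
The numeric importances behind the plot can also be pulled from the booster directly; a minimal sketch (weight = number of times each feature is used to split):

importance = bst.get_fscore()
top10 = sorted(importance.items(), key=lambda kv: kv[1], reverse=True)[:10]
print(top10)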

results for the 44-feature (44fea) model



In [13]:
submission_11['predict'] = submission_11[['predict_' + str(i) for i in range(20)]].mean(axis=1)

In [14]:
submission_11 = submission_11[['id','predict']]
gc.collect()
submission_11.head()


Out[14]:
id predict
0 1547831 4.406201
1 6825659 3.053817
2 5853787 2.684612
3 2316053 1.259826
4 900676 2.301486

In [24]:
submission_11.to_csv('submission_11_new.csv',index = False)

In [16]:
submission_44fea = pd.concat([submission_44fea,submission_11],axis =0,copy = False)

In [17]:
submission_44fea.shape


Out[17]:
(6999251, 2)

In [18]:
submission_44fea.rename(columns = {'predict':'Demanda_uni_equil'},inplace = True)
submission_44fea['Demanda_uni_equil'] = submission_44fea['Demanda_uni_equil'].apply(np.expm1)
submission_44fea.head()


Out[18]:
Semana id Demanda_uni_equil
0 1569352 10.206472
1 6667200 35.766411
2 1592616 17.642273
3 3909690 62.235741
4 3659672 34.847991

In [19]:
submission_44fea['Demanda_uni_equil'] = submission_44fea['Demanda_uni_equil'].round(1)

In [20]:
submission_44fea['Demanda_uni_equil'].describe()


Out[20]:
count    6.999251e+06
mean     6.075400e+00
std      1.606870e+01
min     -7.000000e-01
25%      1.900000e+00
50%      3.100000e+00
75%      5.600000e+00
max      2.879800e+03
Name: Demanda_uni_equil, dtype: float64

In [21]:
mask = submission_44fea[submission_44fea['Demanda_uni_equil'] <= 0].index
submission_44fea.loc[mask,'Demanda_uni_equil'] = 0
submission_44fea['Demanda_uni_equil'].describe()


Out[21]:
count    6.999251e+06
mean     6.074749e+00
std      1.606832e+01
min      0.000000e+00
25%      1.900000e+00
50%      3.100000e+00
75%      5.600000e+00
max      2.879800e+03
Name: Demanda_uni_equil, dtype: float64

In [22]:
submission_44fea.head()


Out[22]:
Semana id Demanda_uni_equil
0 1569352 10.2
1 6667200 35.8
2 1592616 17.6
3 3909690 62.2
4 3659672 34.8

In [23]:
submission_44fea.to_csv('submission_44fea.csv',index = False)