av_student_datafest_scratch_fe's


imports


In [1]:
%load_ext autoreload
%autoreload 2
%matplotlib inline

from fastai.imports import *
from fastai.structured import *

from mlcrate import *
import time
from gplearn.genetic import SymbolicTransformer
from pandas_summary import DataFrameSummary
from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier
from IPython.display import display
import xgboost as xgb
import lightgbm as lgb
from catboost import CatBoostClassifier
import gc
from scipy.cluster import hierarchy as hc
from sklearn import metrics
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from sklearn.metrics import  roc_auc_score, log_loss
from sklearn.model_selection import StratifiedKFold
def ignore_warn(*args, **kwargs):
    pass
warnings.warn = ignore_warn
#will ignore all warning from sklearn, seaborn etc..

pd.option_context("display.max_rows", 1000);
pd.option_context("display.max_columns", 1000);

In [2]:
PATH = os.getcwd();
PATH


Out[2]:
'D:\\Github\\fastai\\courses\\ml1'

In [11]:
df_raw  = pd.read_csv(f'{PATH}\\AV_Stud\\lgb_train_v1.csv', low_memory= False)
df_test = pd.read_csv(f'{PATH}\\AV_Stud\\lgb_test_v1.csv', low_memory=False)

In [60]:
df_raw.columns[:3]


Out[60]:
Index(['Unnamed: 0', 'program_id', 'program_type'], dtype='object')

In [90]:
df_raw.drop('Unnamed: 0', axis=1, inplace=True)
df_test.drop('Unnamed: 0', axis=1, inplace=True)

In [64]:
df_raw.head(2)


Out[64]:
program_id program_type program_duration test_id test_type difficulty_level trainee_id gender education city_tier ... impact_encoded_is_handicapped impact_encoded_trainee_engagement_rating impact_encoded_program_type__program_duration impact_encoded_test_id__program_duration impact_encoded_test_id__test_type impact_encoded_test_type__difficulty_level impact_encoded_education__gender impact_encoded_education__city_tier impact_encoded_gender__city_tier is_pass
0 Y_1 Y 136 150 offline intermediate 9389 M Matriculation 3 ... 0.701995 0.592726 0.785453 0.751688 0.751967 0.656276 0.653450 0.646735 0.698832 0
1 T_1 T 131 44 offline easy 16523 F High School Diploma 4 ... 0.701995 0.737547 0.513423 0.467994 0.467950 0.619956 0.708825 0.653126 0.602991 1

2 rows × 41 columns


In [5]:
cols_dropped = ['id']
target = df_raw.is_pass
df_raw.drop('is_pass', inplace=True, axis =1)

In [6]:
df_raw['trainee_id'].value_counts().sort_values(ascending=False).plot(kind='hist')


Out[6]:
<matplotlib.axes._subplots.AxesSubplot at 0x2969e06a748>

In [7]:
df_test['trainee_id'].value_counts().sort_values(ascending=False).plot(kind='hist')


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x2969e06a6d8>

In [8]:
from collections import Counter

train_trainee_id = Counter(df_raw['trainee_id'])
test_trainee_id = Counter(df_test['trainee_id'])

######################### New Cols Added ##########################

df_raw['trainee_count'] = df_raw['trainee_id'].map(train_trainee_id).astype(np.int32)
df_test['trainee_count'] = df_test['trainee_id'].map(train_trainee_id).astype(np.int32)

########################### Creating Validation Dataset #########################

'''
rows_to_dropped = df_raw.loc[df_raw['trainee_count'] <=3].index
valid_data = df_raw.loc[df_raw['trainee_count'] <=3]

df_raw.drop(rows_to_dropped, inplace=True)
df_raw.shape

########################## Done For Temp ########################################
''';

In [9]:
##################### Dropping ID #######################
df_raw.drop('id', axis =1, inplace=True)
df_test.drop('id', axis =1, inplace=True)
#########################################################

encoding means


In [10]:
# This way we have randomness and are able to reproduce the behaviour within this cell.
np.random.seed(13)
from sklearn.model_selection import KFold

def impact_coding(data, feature, target='y'):
    '''
    In this implementation we get the values and the dictionary as two different steps.
    This is just because initially we were ignoring the dictionary as a result variable.
    
    In this implementation the KFolds use shuffling. If you want reproducibility the cv 
    could be moved to a parameter.
    '''
    n_folds = 7
    n_inner_folds = 5
    impact_coded = pd.Series()
    
    oof_default_mean = data[target].mean() # Gobal mean to use by default (you could further tune this)
    kf = KFold(n_splits=n_folds, shuffle=True)
    oof_mean_cv = pd.DataFrame()
    split = 0
    for infold, oof in kf.split(data[feature]):
            impact_coded_cv = pd.Series()
            kf_inner = KFold(n_splits=n_inner_folds, shuffle=True)
            inner_split = 0
            inner_oof_mean_cv = pd.DataFrame()
            oof_default_inner_mean = data.iloc[infold][target].mean()
            for infold_inner, oof_inner in kf_inner.split(data.iloc[infold]):
                # The mean to apply to the inner oof split (a 1/n_folds % based on the rest)
                oof_mean = data.iloc[infold_inner].groupby(by=feature)[target].mean()
                impact_coded_cv = impact_coded_cv.append(data.iloc[infold].apply(
                            lambda x: oof_mean[x[feature]]
                                      if x[feature] in oof_mean.index
                                      else oof_default_inner_mean
                            , axis=1))

                # Also populate mapping (this has all group -> mean for all inner CV folds)
                inner_oof_mean_cv = inner_oof_mean_cv.join(pd.DataFrame(oof_mean), rsuffix=inner_split, how='outer')
                inner_oof_mean_cv.fillna(value=oof_default_inner_mean, inplace=True)
                inner_split += 1

            # Also populate mapping
            oof_mean_cv = oof_mean_cv.join(pd.DataFrame(inner_oof_mean_cv), rsuffix=split, how='outer')
            oof_mean_cv.fillna(value=oof_default_mean, inplace=True)
            split += 1
            
            impact_coded = impact_coded.append(data.iloc[oof].apply(
                            lambda x: inner_oof_mean_cv.loc[x[feature]].mean()
                                      if x[feature] in inner_oof_mean_cv.index
                                      else oof_default_mean
                            , axis=1))

    return impact_coded, oof_mean_cv.mean(axis=1), oof_default_mean

In [13]:
df_raw['program_type__program_duration'] = df_raw.program_type.str.cat(df_raw.program_duration.astype(str),sep='_')
df_raw['test_id__program_duration'] = df_raw.test_id.astype(str).str.cat(df_raw.program_duration.astype(str),sep='_')
df_raw['test_id__test_type'] = df_raw.test_id.astype(str).str.cat(df_raw.test_type.astype(str),sep='_')
df_raw['test_type__difficulty_level'] = df_raw.test_type.str.cat(df_raw.difficulty_level.astype(str),sep='_')
df_raw['education__gender'] = df_raw.education.str.cat(df_raw.gender.astype(str),sep='_')
df_raw['education__city_tier'] = df_raw.education.str.cat(df_raw.city_tier.astype(str),sep='_')
df_raw['gender__city_tier'] = df_raw.gender.str.cat(df_raw.city_tier.astype(str),sep='_')
df_raw['trainee_engagement_rating']  = df_raw['trainee_engagement_rating'].astype(object)
###########################kind of binning age at trivial level #####################################

df_raw['is_age_39'] = np.zeros(df_raw.shape[0])
my_query = df_raw.query('age<=39.').index
df_raw.iloc[my_query, -1] = 1
df_raw['is_age_39_45'] = np.zeros(df_raw.shape[0])
my_query = df_raw.query('age>=39. & age<=45.').index
df_raw.iloc[my_query, -1] = 1
df_raw['is_age_45'] = np.zeros(df_raw.shape[0])
my_query = df_raw.query('age>=45.').index
df_raw.iloc[my_query, -1] = 1

#######################################################################################################
###################young age (13–30), middle age (31–50) and senior age (51–70)########################
#######################################################################################################

'''df_raw['age_group'] = np.zeros(df_raw.shape[0])
my_query = df_raw.query('age>=13. & age<=30.').index
df_raw.iloc[my_query, -1] = 'young'
my_query = df_raw.query('age>=31. & age<=50.').index
df_raw.iloc[my_query, -1] = 'middle_aged'
my_query = df_raw.query('age>=51. & age<=70.').index
df_raw.iloc[my_query, -1] = 'senior_aged'''
###################################################################################################################
###################################################################################################################
###################################################################################################################

df_test['program_type__program_duration'] = df_test.program_type.str.cat(df_test.program_duration.astype(str),sep='_')
df_test['test_id__program_duration'] = df_test.test_id.astype(str).str.cat(df_test.program_duration.astype(str),sep='_')
df_test['test_id__test_type'] = df_test.test_id.astype(str).str.cat(df_test.test_type.astype(str),sep='_')
df_test['test_type__difficulty_level'] = df_test.test_type.str.cat(df_test.difficulty_level.astype(str),sep='_')
df_test['education__gender'] = df_test.education.str.cat(df_test.gender.astype(str),sep='_')
df_test['education__city_tier'] = df_test.education.str.cat(df_test.city_tier.astype(str),sep='_')
df_test['gender__city_tier'] = df_test.gender.str.cat(df_test.city_tier.astype(str),sep='_')
df_test['trainee_engagement_rating']  = df_test['trainee_engagement_rating'].astype(object)
###########################kind of binning age at trivial level #####################################

df_test['is_age_39'] = np.zeros(df_test.shape[0])
my_query = df_test.query('age<=39.').index
df_test.iloc[my_query, -1] = 1
df_test['is_age_39_45'] = np.zeros(df_test.shape[0])
my_query = df_test.query('age>=39. & age<=45.').index
df_test.iloc[my_query, -1] = 1
df_test['is_age_45'] = np.zeros(df_test.shape[0])
my_query = df_test.query('age>=45.').index
df_test.iloc[my_query, -1] = 1

#######################################################################################################
###################young age (13–30), middle age (31–50) and senior age (51–70)########################
#######################################################################################################
'''
df_test['age_group'] = np.zeros(df_test.shape[0])
my_query = df_test.query('age>=13. & age<=30.').index
df_test.iloc[my_query, -1] = 'young'
my_query = df_test.query('age>=31. & age<=50.').index
df_test.iloc[my_query, -1] = 'middle_aged'
my_query = df_test.query('age>=51. & age<=70.').index
df_test.iloc[my_query, -1] = 'senior_aged''';
###############################################################################

In [14]:
features = df_raw.columns
numeric_features = []
categorical_features = []

for dtype, feature in zip(df_raw.dtypes, df_raw.columns):
    if dtype == object:
        #print(column)
        #print(train_data[column].describe())
        categorical_features.append(feature)
    else:
        numeric_features.append(feature)
categorical_features


Out[14]:
['program_id',
 'program_type',
 'test_type',
 'difficulty_level',
 'gender',
 'education',
 'is_handicapped',
 'trainee_engagement_rating',
 'program_type__program_duration',
 'test_id__program_duration',
 'test_id__test_type',
 'test_type__difficulty_level',
 'education__gender',
 'education__city_tier',
 'gender__city_tier']

In [15]:
df_raw['is_pass'] = target
###############################remeber to drop the target column again after the next block

In [16]:
%%time
# Apply the encoding to training and test data, and preserve the mapping
impact_coding_map = {}
for f in categorical_features:
    print("Impact coding for {}".format(f))
    df_raw["impact_encoded_{}".format(f)], impact_coding_mapping, default_coding = impact_coding(df_raw, f,'is_pass')
    impact_coding_map[f] = (impact_coding_mapping, default_coding)
    mapping, default_mean = impact_coding_map[f]
    df_test["impact_encoded_{}".format(f)] = df_test.apply(lambda x: mapping[x[f]]
                                                                         if x[f] in mapping
                                                                         else default_mean
                                                               , axis=1)

df_raw.drop('is_pass', inplace=True, axis =1);


Impact coding for program_id
Impact coding for program_type
Impact coding for test_type
Impact coding for difficulty_level
Impact coding for gender
Impact coding for education
Impact coding for is_handicapped
Impact coding for trainee_engagement_rating
Impact coding for program_type__program_duration
Impact coding for test_id__program_duration
Impact coding for test_id__test_type
Impact coding for test_type__difficulty_level
Impact coding for education__gender
Impact coding for education__city_tier
Impact coding for gender__city_tier
Wall time: 19min 25s

In [17]:
##################### sanity check  should be empty #####################
set(df_raw.columns) - set(df_test.columns)


Out[17]:
set()

In [18]:
df_raw['is_pass'] = target
df_raw.to_csv(f'{PATH}\\AV_Stud\\lgb_train_v1.csv')
df_test.to_csv(f'{PATH}\\AV_Stud\\lgb_test_v1.csv')
df_raw.drop('is_pass', inplace=True, axis =1);

modelling


In [20]:
X_train, X_test, y = df_raw.copy(), df_test.copy(), target

In [21]:
X_train['is_pass'] = target

In [22]:
rows_to_dropped = X_train.loc[X_train['trainee_count'] <=3].index

X_valid = X_train.loc[X_train['trainee_count'] <=3] #x_valid having is_pass intact drop at end
X_train.drop(rows_to_dropped, inplace=True) #x_train having is_pass intact drop at end

y_train = X_train['is_pass'] 
y_valid = X_valid['is_pass']

X_train.drop('is_pass', axis =1, inplace=True)
X_valid.drop('is_pass', axis =1, inplace=True)

In [23]:
####################### sanity checks #######################
X_train.shape, y_train.shape, X_valid.shape, y_valid.shape, df_raw.shape, df_test.shape


Out[23]:
((55514, 40), (55514,), (17633, 40), (17633,), (73147, 41), (31349, 40))

In [24]:
df_raw['trainee_engagement_rating'].value_counts() 
########Addd this as a feature tooo split rating at 4 #########################:


Out[24]:
1.0    25595
2.0    17234
4.0    13113
3.0    11553
5.0     5575
Name: trainee_engagement_rating, dtype: int64

In [37]:
df_raw['age'].fillna(-1, inplace=True)
df_test['age'].fillna(-1, inplace=True)

df_raw['trainee_engagement_rating'].fillna(method='ffill', inplace=True)
df_test['trainee_engagement_rating'].fillna(method='ffill', inplace=True)

In [65]:
#X_train['age'].fillna(int(df_raw['age'].mean()), inplace=True)
#X_valid['age'].fillna(int(df_raw['age'].mean()), inplace=True)

#X_train['trainee_engagement_rating'].fillna(value=2., inplace=True)
#X_valid['trainee_engagement_rating'].fillna(value=2., inplace=True)

In [30]:
train_cats(df_raw)
apply_cats(df_test, df_raw);

In [19]:
categorical_features_indices = np.where(df_raw.dtypes == 'object')[0];
#df_raw.drop('is_pass',axis=1,inplace=True);
categorical_features_indices


Out[19]:
array([ 0,  1,  4,  5,  7,  8, 12, 13, 15, 16, 17, 18, 19, 20, 21], dtype=int64)

In [39]:
X_train, X_valid, y_train, y_valid = train_test_split(df_raw, target, train_size=0.8, random_state=1234)

model=CatBoostClassifier(iterations=200, depth=12, learning_rate=0.5, loss_function='Logloss',use_best_model=True,\
                class_weights = [0.3045921227117995, 0.6954078772882005 ])

model.fit(X_train, y_train, cat_features=categorical_features_indices, eval_set=(X_valid, y_valid));


0:	learn: 0.4743817	test: 0.4719726	best: 0.4719726 (0)	total: 884ms	remaining: 2m 55s
1:	learn: 0.4359836	test: 0.4339903	best: 0.4339903 (1)	total: 960ms	remaining: 1m 35s
2:	learn: 0.4270392	test: 0.4251859	best: 0.4251859 (2)	total: 1.14s	remaining: 1m 14s
3:	learn: 0.4249625	test: 0.4232204	best: 0.4232204 (3)	total: 1.2s	remaining: 58.7s
4:	learn: 0.4244562	test: 0.4227661	best: 0.4227661 (4)	total: 1.24s	remaining: 48.6s
5:	learn: 0.4243312	test: 0.4226662	best: 0.4226662 (5)	total: 1.29s	remaining: 41.9s
6:	learn: 0.4243001	test: 0.4226475	best: 0.4226475 (6)	total: 1.34s	remaining: 36.9s
7:	learn: 0.4242924	test: 0.4226458	best: 0.4226458 (7)	total: 1.39s	remaining: 33.3s
8:	learn: 0.4242905	test: 0.4226470	best: 0.4226458 (7)	total: 1.43s	remaining: 30.4s
9:	learn: 0.4242900	test: 0.4226480	best: 0.4226458 (7)	total: 1.48s	remaining: 28.1s
10:	learn: 0.4242899	test: 0.4226486	best: 0.4226458 (7)	total: 1.54s	remaining: 26.5s
11:	learn: 0.4242898	test: 0.4226490	best: 0.4226458 (7)	total: 1.59s	remaining: 24.8s
12:	learn: 0.4242898	test: 0.4226492	best: 0.4226458 (7)	total: 1.64s	remaining: 23.5s
13:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.68s	remaining: 22.3s
14:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.73s	remaining: 21.3s
15:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.77s	remaining: 20.4s
16:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.82s	remaining: 19.5s
17:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.86s	remaining: 18.9s
18:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.91s	remaining: 18.2s
19:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 1.96s	remaining: 17.6s
20:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2s	remaining: 17.1s
21:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.05s	remaining: 16.6s
22:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.1s	remaining: 16.2s
23:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.15s	remaining: 15.8s
24:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.21s	remaining: 15.4s
25:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.27s	remaining: 15.2s
26:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.33s	remaining: 15s
27:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.39s	remaining: 14.7s
28:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.44s	remaining: 14.4s
29:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.5s	remaining: 14.1s
30:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.56s	remaining: 14s
31:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.63s	remaining: 13.8s
32:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.69s	remaining: 13.6s
33:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.75s	remaining: 13.4s
34:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.8s	remaining: 13.2s
35:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.85s	remaining: 13s
36:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.91s	remaining: 12.8s
37:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 2.97s	remaining: 12.7s
38:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.03s	remaining: 12.5s
39:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.09s	remaining: 12.4s
40:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.15s	remaining: 12.2s
41:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.2s	remaining: 12s
42:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.27s	remaining: 11.9s
43:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.32s	remaining: 11.8s
44:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.38s	remaining: 11.6s
45:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.44s	remaining: 11.5s
46:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.49s	remaining: 11.4s
47:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.56s	remaining: 11.3s
48:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.61s	remaining: 11.1s
49:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.68s	remaining: 11s
50:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.74s	remaining: 10.9s
51:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.79s	remaining: 10.8s
52:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.85s	remaining: 10.7s
53:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.92s	remaining: 10.6s
54:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 3.99s	remaining: 10.5s
55:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.04s	remaining: 10.4s
56:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.1s	remaining: 10.3s
57:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.16s	remaining: 10.2s
58:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.22s	remaining: 10.1s
59:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.28s	remaining: 9.98s
60:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.34s	remaining: 9.88s
61:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.39s	remaining: 9.78s
62:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.45s	remaining: 9.68s
63:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.51s	remaining: 9.58s
64:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.57s	remaining: 9.5s
65:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.64s	remaining: 9.42s
66:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.7s	remaining: 9.34s
67:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.76s	remaining: 9.24s
68:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.83s	remaining: 9.16s
69:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.88s	remaining: 9.07s
70:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 4.94s	remaining: 8.97s
71:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5s	remaining: 8.89s
72:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.06s	remaining: 8.8s
73:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.12s	remaining: 8.71s
74:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.17s	remaining: 8.62s
75:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.24s	remaining: 8.56s
76:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.3s	remaining: 8.47s
77:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.36s	remaining: 8.38s
78:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.42s	remaining: 8.3s
79:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.48s	remaining: 8.22s
80:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.54s	remaining: 8.14s
81:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.6s	remaining: 8.06s
82:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.66s	remaining: 7.97s
83:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.71s	remaining: 7.89s
84:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.77s	remaining: 7.81s
85:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.83s	remaining: 7.73s
86:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.89s	remaining: 7.65s
87:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 5.95s	remaining: 7.57s
88:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6s	remaining: 7.49s
89:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.07s	remaining: 7.41s
90:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.12s	remaining: 7.33s
91:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.18s	remaining: 7.26s
92:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.24s	remaining: 7.18s
93:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.31s	remaining: 7.11s
94:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.36s	remaining: 7.03s
95:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.42s	remaining: 6.96s
96:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.48s	remaining: 6.88s
97:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.55s	remaining: 6.81s
98:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.6s	remaining: 6.74s
99:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.66s	remaining: 6.66s
100:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.71s	remaining: 6.58s
101:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.77s	remaining: 6.51s
102:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.83s	remaining: 6.43s
103:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.88s	remaining: 6.35s
104:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.93s	remaining: 6.28s
105:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 6.99s	remaining: 6.2s
106:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.05s	remaining: 6.13s
107:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.1s	remaining: 6.05s
108:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.16s	remaining: 5.98s
109:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.21s	remaining: 5.9s
110:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.27s	remaining: 5.83s
111:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.32s	remaining: 5.75s
112:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.38s	remaining: 5.68s
113:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.44s	remaining: 5.61s
114:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.57s	remaining: 5.6s
115:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.63s	remaining: 5.53s
116:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.68s	remaining: 5.45s
117:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.74s	remaining: 5.38s
118:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.79s	remaining: 5.31s
119:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.85s	remaining: 5.23s
120:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.91s	remaining: 5.16s
121:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 7.96s	remaining: 5.09s
122:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.04s	remaining: 5.03s
123:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.09s	remaining: 4.96s
124:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.15s	remaining: 4.89s
125:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.21s	remaining: 4.82s
126:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.28s	remaining: 4.76s
127:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.34s	remaining: 4.69s
128:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.41s	remaining: 4.63s
129:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.47s	remaining: 4.56s
130:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.53s	remaining: 4.49s
131:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.59s	remaining: 4.42s
132:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.64s	remaining: 4.36s
133:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.7s	remaining: 4.29s
134:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.76s	remaining: 4.22s
135:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.83s	remaining: 4.15s
136:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.89s	remaining: 4.09s
137:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 8.96s	remaining: 4.02s
138:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.02s	remaining: 3.96s
139:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.08s	remaining: 3.89s
140:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.13s	remaining: 3.82s
141:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.19s	remaining: 3.75s
142:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.25s	remaining: 3.69s
143:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.31s	remaining: 3.62s
144:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.37s	remaining: 3.55s
145:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.43s	remaining: 3.49s
146:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.49s	remaining: 3.42s
147:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.55s	remaining: 3.35s
148:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.61s	remaining: 3.29s
149:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.68s	remaining: 3.23s
150:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.73s	remaining: 3.16s
151:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.79s	remaining: 3.09s
152:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.85s	remaining: 3.03s
153:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.91s	remaining: 2.96s
154:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 9.97s	remaining: 2.89s
155:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10s	remaining: 2.83s
156:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.1s	remaining: 2.77s
157:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.2s	remaining: 2.7s
158:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.2s	remaining: 2.64s
159:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.3s	remaining: 2.57s
160:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.4s	remaining: 2.51s
161:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.4s	remaining: 2.44s
162:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.5s	remaining: 2.38s
163:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.5s	remaining: 2.31s
164:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.6s	remaining: 2.25s
165:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.7s	remaining: 2.18s
166:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.7s	remaining: 2.12s
167:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.8s	remaining: 2.05s
168:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.8s	remaining: 1.99s
169:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 10.9s	remaining: 1.92s
170:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11s	remaining: 1.86s
171:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11s	remaining: 1.79s
172:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.1s	remaining: 1.73s
173:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.1s	remaining: 1.66s
174:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.2s	remaining: 1.6s
175:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.3s	remaining: 1.54s
176:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.3s	remaining: 1.47s
177:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.4s	remaining: 1.41s
178:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.4s	remaining: 1.34s
179:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.5s	remaining: 1.28s
180:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.6s	remaining: 1.22s
181:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.6s	remaining: 1.15s
182:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.7s	remaining: 1.09s
183:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.8s	remaining: 1.02s
184:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.8s	remaining: 959ms
185:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.9s	remaining: 895ms
186:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 11.9s	remaining: 830ms
187:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12s	remaining: 766ms
188:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.1s	remaining: 703ms
189:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.1s	remaining: 638ms
190:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.2s	remaining: 574ms
191:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.3s	remaining: 511ms
192:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.3s	remaining: 447ms
193:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.4s	remaining: 383ms
194:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.4s	remaining: 319ms
195:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.5s	remaining: 255ms
196:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.6s	remaining: 191ms
197:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.6s	remaining: 127ms
198:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.7s	remaining: 63.7ms
199:	learn: 0.4242898	test: 0.4226493	best: 0.4226458 (7)	total: 12.7s	remaining: 0us

bestTest = 0.4226458445
bestIteration = 7

Shrink model to first 8 iterations.

In [40]:
preds_cat = model.predict_proba(df_test)

lgb


In [20]:
train_cats(df_raw);
apply_cats(df_test, df_raw);

In [81]:
train_cats(X_train);
apply_cats(X_valid, X_train);

In [21]:
def lable_encode_edu(x):
    switcher = {
        "High School Diploma": 1,
        "Matriculation": 2,
        "Bachelors": 3,
        "Masters": 4,
    }
    return switcher.get(x, 0)

def lable_encode_diff(x):
    switcher = {
        "easy": 1,
        "intermediate": 2,
        "hard": 3,
        "very hard": 4,
    }
    return switcher.get(x, 0)


df_raw["education"] = df_raw["education"].apply(lambda x: lable_encode_edu(x))
df_test["education"] = df_test["education"].apply(lambda x: lable_encode_edu(x))

df_raw["difficulty_level"] = df_raw["difficulty_level"].apply(lambda x: lable_encode_edu(x))
df_test["difficulty_level"] = df_test["difficulty_level"].apply(lambda x: lable_encode_edu(x))

In [24]:
print("LGB startting")

params = {
        'use_missing': True,
        'application': 'binary',
        'learning_rate': 0.05,
        'objective': 'binary',
        'max_depth': 11,
        'num_leaves': 500,
        'verbosity': 1,
        'metric' : ['auc', 'binary_logloss'], 
        'data_random_seed': 1,
        'bagging_fraction': 0.75,
        'feature_fraction': 0.75,
        'nthread': 4,
        'min_data_in_leaf': 100,
        'max_bin': 255,
        'is_unbalance': 'True',
        'max_cat_threshold' : 32,
        'enable_bundle' : False
    
    }


d_train = lgb.Dataset(df_raw, label= target)
#d_val = lgb.Dataset(X_valid, label=y_valid)

watchlist = [d_train]
model_lgb = lgb.train(params, train_set=d_train,num_boost_round=1500,verbose_eval=100) #valid_sets=watchlist, verbose_eval=100,\
                  #early_stopping_rounds=500,)

print('Finish LGB Training')


LGB startting
Finish LGB Training

In [36]:
df_raw = pd.get_dummies(df_raw, drop_first=True,prefix='dummy',)
df_test = pd.get_dummies(df_raw, drop_first=True,prefix='dummy',)

In [38]:
df_raw.shape, df_test.shape


Out[38]:
((73147, 497), (73147, 497))

In [25]:
def make_submission(probs):
    sample = pd.read_csv(f'{PATH}\\AV_Stud\\sample_submission_vaSxamm.csv')
    submit = sample.copy()
    submit['is_pass'] = probs
    return submit

In [26]:
preds_lgb = model_lgb.predict(data=df_test)

In [116]:
submit = make_submission(preds_xgb)

In [117]:
submit.head(2)


Out[117]:
id is_pass
0 1626_45 0.497688
1 11020_130 0.988605

In [118]:
submit.to_csv(f'{PATH}\\AV_Stud\\xgb_____.csv', index=False)

In [ ]:
model.

In [39]:
model_lgb.save_model(f'{PATH}\\AV_Stud\\model.txt', num_iteration=model_lgb.best_iteration)

In [40]:
json_model = model_lgb.dump_model()

xgb


In [88]:
train_cats(df_raw)
apply_cats(df_test,df_raw)

In [92]:
cols_dropped = ['program_type__program_duration','test_id__program_duration','test_id__test_type',\
                'test_type__difficulty_level','education__gender','education__city_tier',\
                'gender__city_tier']
df_test = df_test.drop(cols_dropped, axis=1)
df_raw = df_raw.drop(cols_dropped, axis=1)
df_raw.drop('is_pass', axis=1, inplace=True)

In [102]:
set(df_raw.columns) - set(df_test.columns)


Out[102]:
set()

In [98]:
df_raw = pd.get_dummies(df_raw, drop_first=True, prefix='dummy',columns=['program_id','program_type','test_type','difficulty_level',\
                                                                       'gender','education','is_handicapped'])

df_test = pd.get_dummies(df_test, drop_first=True, prefix='dummy',columns=['program_id','program_type','test_type','difficulty_level',\
                                                                       'gender','education','is_handicapped'])

In [114]:
def runXGB(train_X, train_y, test_X, test_y=None, seed_val=1, depth = 11, model = None):
    
        params = {}
        params['booster'] = 'gbtree'
        #params['updater'] = 'coord_descent'
        params["objective"] = "binary:logistic"
        params['eval_metric'] = 'auc'
        params["eta"] = 0.05 #0.03
        params["subsample"] = .85
        params["silent"] = 0
        params['verbose'] = 2
        params["max_depth"] = depth
        params["seed"] = seed_val
        params["max_delta_step"] = 4
        params['scale_pos_weight'] =  0.4380049934141978
        params["gamma"] = 0.6 #.5 #.1 #.2
        params['colsample_bytree'] = 0.9
        num_rounds = 2500 #3600 #2000 #4000

        plst = list(params.items())
        xgtrain = xgb.DMatrix(train_X, label=train_y)
        
        if test_y is not None:
                print('1st block\n')
                xgtest = xgb.DMatrix(test_X, label=test_y)
                watchlist = [ (xgtrain,'train'), (xgtest, 'test') ]
                model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds= 50,verbose_eval=True)
        else:
                print('2nd block\n')
                xgtest = xgb.DMatrix(test_X)
                watchlist = [ (xgtrain,'train')]
                print('########################### model ######################\n')
                model = xgb.train(plst, xgtrain, num_rounds)

        pred_test_y = model.predict(xgtest,ntree_limit=model.best_ntree_limit)
        
        return pred_test_y, model, plst, model.best_ntree_limit

In [111]:
df_raw.drop('dummy_Y', axis=1, inplace=True)
df_test.drop('dummy_Y', axis=1, inplace=True)

In [52]:
cols = np.unique(df_raw.columns)

In [115]:
%%time
preds_xgb, model, params, num_rounds = runXGB(df_raw, target, df_test)


2nd block

########################### model ######################

Wall time: 10min 45s

In [123]:
xgb.plot_importance(model,max_num_features=15);



In [125]:
df_raw.to_csv(f'{PATH}\\AV_Stud\\xgb_train_cleaned.csv', index=False)
df_test.to_csv(f'{PATH}\\AV_Stud\\xgb_test_cleaned.csv', index=False)

In [ ]:

xgb anokas


In [3]:
df_raw = pd.read_csv(f'{PATH}\\AV_Stud\\xgb_train_cleaned.csv')
df_test = pd.read_csv(f'{PATH}\\AV_Stud\\xgb_test_cleaned.csv')

In [5]:
df_raw.drop('trainee_id', axis =1, inplace=True)
df_test.drop('trainee_id', axis =1, inplace=True)

df_raw.drop('test_id', axis =1, inplace=True)
df_test.drop('test_id', axis =1, inplace=True)

In [9]:
df_raw['is_age_20_30'] = np.zeros(df_raw.shape[0])
my_query = df_raw.query('age>=20. & age<=30.').index
df_raw.iloc[my_query, -1] = 1

df_raw['is_age_30_40'] = np.zeros(df_raw.shape[0])
my_query = df_raw.query('age>=30. & age<=40.').index
df_raw.iloc[my_query, -1] = 1

df_test['is_age_20_30'] = np.zeros(df_test.shape[0])
my_query = df_test.query('age>=20. & age<=30.').index
df_test.iloc[my_query, -1] = 1

df_test['is_age_30_40'] = np.zeros(df_test.shape[0])
my_query = df_test.query('age>=30. & age<=40.').index
df_test.iloc[my_query, -1] = 1

In [25]:
import mlcrate

In [26]:
params = {}
params['booster'] = 'gbtree'
#params['updater'] = 'coord_descent'
params["objective"] = "binary:logistic"
params['eval_metric'] = 'auc'
params["eta"] = 0.05 #0.03
params["subsample"] = .85
params["silent"] = 0
params['verbose'] = 2
params["max_depth"] = 11
params["seed"] = 1
params["max_delta_step"] = 4
params['scale_pos_weight'] =  0.4380049934141978
params["gamma"] = 0.6 #.5 #.1 #.2
params['colsample_bytree'] = 0.9
params['nrounds'] = 2500 #3600 #2000 #4000

In [31]:
model, p_train, p_test = mlcrate.xgb.train_kfold(params, df_raw, target, df_test, folds =7, stratify=target)


[mlcrate] Training 7 stratified XGBoost models on training set (73147, 61) with test set (31349, 61)
[mlcrate] Running fold 0, 62697 train samples, 10450 validation samples
[0]	train-auc:0.792055	valid-auc:0.704242
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.810779	valid-auc:0.720031
[2]	train-auc:0.819467	valid-auc:0.725405
[3]	train-auc:0.825992	valid-auc:0.727248
[4]	train-auc:0.829709	valid-auc:0.729409
[5]	train-auc:0.832152	valid-auc:0.731301
[6]	train-auc:0.834853	valid-auc:0.733199
[7]	train-auc:0.838118	valid-auc:0.733364
[8]	train-auc:0.840375	valid-auc:0.734197
[9]	train-auc:0.841851	valid-auc:0.734797
[10]	train-auc:0.843851	valid-auc:0.734994
[11]	train-auc:0.845404	valid-auc:0.735378
[12]	train-auc:0.847211	valid-auc:0.735713
[13]	train-auc:0.849296	valid-auc:0.735609
[14]	train-auc:0.850991	valid-auc:0.735679
[15]	train-auc:0.851657	valid-auc:0.736219
[16]	train-auc:0.853067	valid-auc:0.736382
[17]	train-auc:0.854394	valid-auc:0.736276
[18]	train-auc:0.855947	valid-auc:0.736622
[19]	train-auc:0.857717	valid-auc:0.737239
[20]	train-auc:0.859367	valid-auc:0.737467
[21]	train-auc:0.860904	valid-auc:0.737435
[22]	train-auc:0.86199	valid-auc:0.737549
[23]	train-auc:0.863399	valid-auc:0.737816
[24]	train-auc:0.864861	valid-auc:0.737935
[25]	train-auc:0.866084	valid-auc:0.738002
[26]	train-auc:0.86713	valid-auc:0.738287
[27]	train-auc:0.868232	valid-auc:0.738638
[28]	train-auc:0.869479	valid-auc:0.738813
[29]	train-auc:0.870231	valid-auc:0.738675
[30]	train-auc:0.87106	valid-auc:0.7388
[31]	train-auc:0.872178	valid-auc:0.738885
[32]	train-auc:0.873442	valid-auc:0.739034
[33]	train-auc:0.874144	valid-auc:0.739193
[34]	train-auc:0.875127	valid-auc:0.739466
[35]	train-auc:0.875781	valid-auc:0.739348
[36]	train-auc:0.877024	valid-auc:0.739538
[37]	train-auc:0.878387	valid-auc:0.739731
[38]	train-auc:0.879471	valid-auc:0.739916
[39]	train-auc:0.880601	valid-auc:0.739965
[40]	train-auc:0.881603	valid-auc:0.74029
[41]	train-auc:0.882097	valid-auc:0.740347
[42]	train-auc:0.883084	valid-auc:0.740637
[43]	train-auc:0.883924	valid-auc:0.740743
[44]	train-auc:0.884798	valid-auc:0.740549
[45]	train-auc:0.885963	valid-auc:0.740697
[46]	train-auc:0.886576	valid-auc:0.740962
[47]	train-auc:0.887442	valid-auc:0.741078
[48]	train-auc:0.88834	valid-auc:0.741255
[49]	train-auc:0.888984	valid-auc:0.741163
[50]	train-auc:0.889827	valid-auc:0.741189
[51]	train-auc:0.890342	valid-auc:0.741299
[52]	train-auc:0.891147	valid-auc:0.741201
[53]	train-auc:0.892183	valid-auc:0.741307
[54]	train-auc:0.893062	valid-auc:0.74141
[55]	train-auc:0.893658	valid-auc:0.741473
[56]	train-auc:0.8945	valid-auc:0.741534
[57]	train-auc:0.895201	valid-auc:0.741542
[58]	train-auc:0.896222	valid-auc:0.741727
[59]	train-auc:0.896784	valid-auc:0.7417
[60]	train-auc:0.897608	valid-auc:0.741673
[61]	train-auc:0.898624	valid-auc:0.741608
[62]	train-auc:0.899117	valid-auc:0.741706
[63]	train-auc:0.899877	valid-auc:0.742012
[64]	train-auc:0.900384	valid-auc:0.742165
[65]	train-auc:0.900764	valid-auc:0.742248
[66]	train-auc:0.901864	valid-auc:0.742577
[67]	train-auc:0.902112	valid-auc:0.742601
[68]	train-auc:0.90287	valid-auc:0.742673
[69]	train-auc:0.903323	valid-auc:0.74267
[70]	train-auc:0.904148	valid-auc:0.7429
[71]	train-auc:0.904776	valid-auc:0.743095
[72]	train-auc:0.90518	valid-auc:0.743113
[73]	train-auc:0.90582	valid-auc:0.743011
[74]	train-auc:0.906192	valid-auc:0.743023
[75]	train-auc:0.90756	valid-auc:0.743346
[76]	train-auc:0.907926	valid-auc:0.743477
[77]	train-auc:0.908411	valid-auc:0.743611
[78]	train-auc:0.908995	valid-auc:0.743629
[79]	train-auc:0.910244	valid-auc:0.743932
[80]	train-auc:0.910822	valid-auc:0.744039
[81]	train-auc:0.911124	valid-auc:0.744058
[82]	train-auc:0.91186	valid-auc:0.744032
[83]	train-auc:0.912628	valid-auc:0.744189
[84]	train-auc:0.913099	valid-auc:0.744258
[85]	train-auc:0.913437	valid-auc:0.744301
[86]	train-auc:0.914501	valid-auc:0.744412
[87]	train-auc:0.915277	valid-auc:0.744346
[88]	train-auc:0.915888	valid-auc:0.744384
[89]	train-auc:0.916447	valid-auc:0.744324
[90]	train-auc:0.917557	valid-auc:0.744584
[91]	train-auc:0.918031	valid-auc:0.744626
[92]	train-auc:0.918691	valid-auc:0.744613
[93]	train-auc:0.919101	valid-auc:0.744765
[94]	train-auc:0.919568	valid-auc:0.74477
[95]	train-auc:0.91987	valid-auc:0.744851
[96]	train-auc:0.92048	valid-auc:0.745068
[97]	train-auc:0.92076	valid-auc:0.745016
[98]	train-auc:0.921112	valid-auc:0.744999
[99]	train-auc:0.921637	valid-auc:0.745129
[100]	train-auc:0.922841	valid-auc:0.745202
[101]	train-auc:0.923264	valid-auc:0.745234
[102]	train-auc:0.923631	valid-auc:0.745403
[103]	train-auc:0.923802	valid-auc:0.745431
[104]	train-auc:0.924216	valid-auc:0.745876
[105]	train-auc:0.924755	valid-auc:0.745896
[106]	train-auc:0.92511	valid-auc:0.745961
[107]	train-auc:0.925745	valid-auc:0.74604
[108]	train-auc:0.925964	valid-auc:0.746009
[109]	train-auc:0.926098	valid-auc:0.745906
[110]	train-auc:0.926207	valid-auc:0.745943
[111]	train-auc:0.926476	valid-auc:0.746064
[112]	train-auc:0.92685	valid-auc:0.746025
[113]	train-auc:0.927672	valid-auc:0.745927
[114]	train-auc:0.928674	valid-auc:0.745793
[115]	train-auc:0.929125	valid-auc:0.74573
[116]	train-auc:0.930178	valid-auc:0.745793
[117]	train-auc:0.930284	valid-auc:0.745771
[118]	train-auc:0.930668	valid-auc:0.745746
[119]	train-auc:0.931241	valid-auc:0.745865
[120]	train-auc:0.931384	valid-auc:0.745923
[121]	train-auc:0.931938	valid-auc:0.746045
[122]	train-auc:0.932737	valid-auc:0.746154
[123]	train-auc:0.933376	valid-auc:0.746237
[124]	train-auc:0.933754	valid-auc:0.746439
[125]	train-auc:0.934248	valid-auc:0.746417
[126]	train-auc:0.93465	valid-auc:0.746543
[127]	train-auc:0.934901	valid-auc:0.746538
[128]	train-auc:0.935702	valid-auc:0.746742
[129]	train-auc:0.936316	valid-auc:0.746734
[130]	train-auc:0.936519	valid-auc:0.746737
[131]	train-auc:0.937056	valid-auc:0.746757
[132]	train-auc:0.937586	valid-auc:0.746786
[133]	train-auc:0.938298	valid-auc:0.746917
[134]	train-auc:0.938968	valid-auc:0.74683
[135]	train-auc:0.939192	valid-auc:0.746894
[136]	train-auc:0.939293	valid-auc:0.746887
[137]	train-auc:0.939548	valid-auc:0.746844
[138]	train-auc:0.940201	valid-auc:0.747063
[139]	train-auc:0.940635	valid-auc:0.746997
[140]	train-auc:0.940856	valid-auc:0.747088
[141]	train-auc:0.941173	valid-auc:0.746975
[142]	train-auc:0.941595	valid-auc:0.746854
[143]	train-auc:0.941998	valid-auc:0.746788
[144]	train-auc:0.942249	valid-auc:0.746852
[145]	train-auc:0.942896	valid-auc:0.746977
[146]	train-auc:0.943124	valid-auc:0.746913
[147]	train-auc:0.94339	valid-auc:0.746976
[148]	train-auc:0.944271	valid-auc:0.747297
[149]	train-auc:0.944944	valid-auc:0.747325
[150]	train-auc:0.945856	valid-auc:0.747222
[151]	train-auc:0.946593	valid-auc:0.747227
[152]	train-auc:0.946736	valid-auc:0.747173
[153]	train-auc:0.947116	valid-auc:0.747215
[154]	train-auc:0.947607	valid-auc:0.747142
[155]	train-auc:0.947892	valid-auc:0.747287
[156]	train-auc:0.948354	valid-auc:0.747261
[157]	train-auc:0.948776	valid-auc:0.74731
[158]	train-auc:0.949283	valid-auc:0.747333
[159]	train-auc:0.949798	valid-auc:0.747223
[160]	train-auc:0.950409	valid-auc:0.747075
[161]	train-auc:0.951162	valid-auc:0.747061
[162]	train-auc:0.951536	valid-auc:0.746995
[163]	train-auc:0.951977	valid-auc:0.747017
[164]	train-auc:0.952167	valid-auc:0.747058
[165]	train-auc:0.952853	valid-auc:0.747019
[166]	train-auc:0.95289	valid-auc:0.746991
[167]	train-auc:0.953204	valid-auc:0.746964
[168]	train-auc:0.953331	valid-auc:0.746935
[169]	train-auc:0.953792	valid-auc:0.747107
[170]	train-auc:0.954039	valid-auc:0.747114
[171]	train-auc:0.954212	valid-auc:0.747113
[172]	train-auc:0.954712	valid-auc:0.747099
[173]	train-auc:0.95499	valid-auc:0.747057
[174]	train-auc:0.955469	valid-auc:0.747046
[175]	train-auc:0.95605	valid-auc:0.747098
[176]	train-auc:0.956643	valid-auc:0.746956
[177]	train-auc:0.957133	valid-auc:0.746849
[178]	train-auc:0.957722	valid-auc:0.74691
[179]	train-auc:0.957966	valid-auc:0.746867
[180]	train-auc:0.958212	valid-auc:0.746975
[181]	train-auc:0.95891	valid-auc:0.746798
[182]	train-auc:0.959065	valid-auc:0.746774
[183]	train-auc:0.959211	valid-auc:0.746818
[184]	train-auc:0.959555	valid-auc:0.746978
[185]	train-auc:0.959743	valid-auc:0.746925
[186]	train-auc:0.959778	valid-auc:0.746921
[187]	train-auc:0.960115	valid-auc:0.746999
[188]	train-auc:0.960358	valid-auc:0.746998
[189]	train-auc:0.96093	valid-auc:0.746916
[190]	train-auc:0.961234	valid-auc:0.746883
[191]	train-auc:0.961691	valid-auc:0.746864
[192]	train-auc:0.962029	valid-auc:0.746951
[193]	train-auc:0.962489	valid-auc:0.746999
[194]	train-auc:0.962866	valid-auc:0.747096
[195]	train-auc:0.963356	valid-auc:0.74696
[196]	train-auc:0.963592	valid-auc:0.747012
[197]	train-auc:0.963809	valid-auc:0.747054
[198]	train-auc:0.964222	valid-auc:0.747067
[199]	train-auc:0.964446	valid-auc:0.747099
[200]	train-auc:0.964615	valid-auc:0.747115
[201]	train-auc:0.964878	valid-auc:0.74712
[202]	train-auc:0.965368	valid-auc:0.747166
[203]	train-auc:0.965726	valid-auc:0.747164
[204]	train-auc:0.966	valid-auc:0.747195
[205]	train-auc:0.966445	valid-auc:0.747157
[206]	train-auc:0.96688	valid-auc:0.747256
[207]	train-auc:0.967031	valid-auc:0.74719
[208]	train-auc:0.967215	valid-auc:0.747136
Stopping. Best iteration:
[158]	train-auc:0.949283	valid-auc:0.747333

C:\ProgramData\Anaconda3\lib\site-packages\mlcrate\backend.py:7: UserWarning: Timer.format_elapsed() has been deprecated in favour of Timer.fsince() and will be removed soon
  warn(message)
[mlcrate] Finished training fold 0 - took 48s - running score 0.747333
[mlcrate] Running fold 1, 62697 train samples, 10450 validation samples
[0]	train-auc:0.790873	valid-auc:0.717558
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.808673	valid-auc:0.733986
[2]	train-auc:0.816236	valid-auc:0.738913
[3]	train-auc:0.821711	valid-auc:0.743052
[4]	train-auc:0.825566	valid-auc:0.745982
[5]	train-auc:0.828671	valid-auc:0.747562
[6]	train-auc:0.831671	valid-auc:0.747397
[7]	train-auc:0.834306	valid-auc:0.748607
[8]	train-auc:0.835955	valid-auc:0.749295
[9]	train-auc:0.838177	valid-auc:0.74959
[10]	train-auc:0.839973	valid-auc:0.750193
[11]	train-auc:0.842007	valid-auc:0.750757
[12]	train-auc:0.843963	valid-auc:0.751312
[13]	train-auc:0.845546	valid-auc:0.751623
[14]	train-auc:0.847264	valid-auc:0.752136
[15]	train-auc:0.848376	valid-auc:0.752138
[16]	train-auc:0.850043	valid-auc:0.752063
[17]	train-auc:0.851242	valid-auc:0.752459
[18]	train-auc:0.853066	valid-auc:0.7529
[19]	train-auc:0.854839	valid-auc:0.753066
[20]	train-auc:0.856394	valid-auc:0.753665
[21]	train-auc:0.857436	valid-auc:0.753875
[22]	train-auc:0.858872	valid-auc:0.754086
[23]	train-auc:0.859664	valid-auc:0.754312
[24]	train-auc:0.860645	valid-auc:0.754429
[25]	train-auc:0.861953	valid-auc:0.754799
[26]	train-auc:0.863154	valid-auc:0.755141
[27]	train-auc:0.864335	valid-auc:0.755649
[28]	train-auc:0.865509	valid-auc:0.755593
[29]	train-auc:0.866757	valid-auc:0.755855
[30]	train-auc:0.867735	valid-auc:0.755911
[31]	train-auc:0.869299	valid-auc:0.756131
[32]	train-auc:0.870546	valid-auc:0.756227
[33]	train-auc:0.871679	valid-auc:0.756439
[34]	train-auc:0.872762	valid-auc:0.756707
[35]	train-auc:0.873705	valid-auc:0.756643
[36]	train-auc:0.87495	valid-auc:0.75665
[37]	train-auc:0.876373	valid-auc:0.757182
[38]	train-auc:0.878164	valid-auc:0.757253
[39]	train-auc:0.879611	valid-auc:0.757416
[40]	train-auc:0.880705	valid-auc:0.757619
[41]	train-auc:0.881801	valid-auc:0.757485
[42]	train-auc:0.882623	valid-auc:0.757241
[43]	train-auc:0.883458	valid-auc:0.757344
[44]	train-auc:0.884233	valid-auc:0.757526
[45]	train-auc:0.885074	valid-auc:0.757519
[46]	train-auc:0.885975	valid-auc:0.757479
[47]	train-auc:0.886904	valid-auc:0.757715
[48]	train-auc:0.888125	valid-auc:0.757819
[49]	train-auc:0.889268	valid-auc:0.757861
[50]	train-auc:0.890625	valid-auc:0.757952
[51]	train-auc:0.891811	valid-auc:0.757882
[52]	train-auc:0.893064	valid-auc:0.758067
[53]	train-auc:0.894293	valid-auc:0.758172
[54]	train-auc:0.89529	valid-auc:0.758224
[55]	train-auc:0.8959	valid-auc:0.758233
[56]	train-auc:0.896875	valid-auc:0.758382
[57]	train-auc:0.89768	valid-auc:0.758425
[58]	train-auc:0.898672	valid-auc:0.758589
[59]	train-auc:0.89953	valid-auc:0.758533
[60]	train-auc:0.900102	valid-auc:0.758449
[61]	train-auc:0.901075	valid-auc:0.758409
[62]	train-auc:0.901854	valid-auc:0.758461
[63]	train-auc:0.902727	valid-auc:0.758374
[64]	train-auc:0.903472	valid-auc:0.758811
[65]	train-auc:0.904414	valid-auc:0.758919
[66]	train-auc:0.905115	valid-auc:0.759083
[67]	train-auc:0.906025	valid-auc:0.759116
[68]	train-auc:0.906765	valid-auc:0.758903
[69]	train-auc:0.907904	valid-auc:0.7589
[70]	train-auc:0.908651	valid-auc:0.759032
[71]	train-auc:0.909065	valid-auc:0.759101
[72]	train-auc:0.909589	valid-auc:0.759236
[73]	train-auc:0.910335	valid-auc:0.759178
[74]	train-auc:0.911181	valid-auc:0.759198
[75]	train-auc:0.911986	valid-auc:0.75921
[76]	train-auc:0.91227	valid-auc:0.759266
[77]	train-auc:0.91302	valid-auc:0.759139
[78]	train-auc:0.913574	valid-auc:0.759098
[79]	train-auc:0.914603	valid-auc:0.759162
[80]	train-auc:0.914952	valid-auc:0.759092
[81]	train-auc:0.915356	valid-auc:0.759066
[82]	train-auc:0.915622	valid-auc:0.759107
[83]	train-auc:0.915867	valid-auc:0.759231
[84]	train-auc:0.916632	valid-auc:0.759379
[85]	train-auc:0.91735	valid-auc:0.759564
[86]	train-auc:0.917935	valid-auc:0.759636
[87]	train-auc:0.918536	valid-auc:0.759652
[88]	train-auc:0.918747	valid-auc:0.759712
[89]	train-auc:0.919402	valid-auc:0.75967
[90]	train-auc:0.919877	valid-auc:0.759763
[91]	train-auc:0.920657	valid-auc:0.75972
[92]	train-auc:0.92152	valid-auc:0.759925
[93]	train-auc:0.921817	valid-auc:0.759928
[94]	train-auc:0.922433	valid-auc:0.759883
[95]	train-auc:0.923051	valid-auc:0.760181
[96]	train-auc:0.923393	valid-auc:0.760281
[97]	train-auc:0.924202	valid-auc:0.760226
[98]	train-auc:0.924379	valid-auc:0.760244
[99]	train-auc:0.924606	valid-auc:0.760282
[100]	train-auc:0.92518	valid-auc:0.760382
[101]	train-auc:0.925445	valid-auc:0.760472
[102]	train-auc:0.925977	valid-auc:0.760441
[103]	train-auc:0.926347	valid-auc:0.760459
[104]	train-auc:0.927003	valid-auc:0.760458
[105]	train-auc:0.927386	valid-auc:0.760281
[106]	train-auc:0.928067	valid-auc:0.760193
[107]	train-auc:0.928705	valid-auc:0.760243
[108]	train-auc:0.929428	valid-auc:0.760123
[109]	train-auc:0.929858	valid-auc:0.760137
[110]	train-auc:0.931138	valid-auc:0.759995
[111]	train-auc:0.931496	valid-auc:0.759972
[112]	train-auc:0.931884	valid-auc:0.760026
[113]	train-auc:0.932303	valid-auc:0.759942
[114]	train-auc:0.932514	valid-auc:0.759935
[115]	train-auc:0.932616	valid-auc:0.759938
[116]	train-auc:0.933017	valid-auc:0.760029
[117]	train-auc:0.933535	valid-auc:0.759982
[118]	train-auc:0.933877	valid-auc:0.759971
[119]	train-auc:0.934406	valid-auc:0.75994
[120]	train-auc:0.934544	valid-auc:0.760114
[121]	train-auc:0.935147	valid-auc:0.760026
[122]	train-auc:0.935612	valid-auc:0.760071
[123]	train-auc:0.936147	valid-auc:0.760064
[124]	train-auc:0.936834	valid-auc:0.760184
[125]	train-auc:0.937329	valid-auc:0.760247
[126]	train-auc:0.937811	valid-auc:0.760317
[127]	train-auc:0.938219	valid-auc:0.760392
[128]	train-auc:0.938708	valid-auc:0.760441
[129]	train-auc:0.939366	valid-auc:0.760425
[130]	train-auc:0.94033	valid-auc:0.760188
[131]	train-auc:0.941058	valid-auc:0.760329
[132]	train-auc:0.941569	valid-auc:0.760284
[133]	train-auc:0.941896	valid-auc:0.760312
[134]	train-auc:0.942257	valid-auc:0.760381
[135]	train-auc:0.942933	valid-auc:0.760348
[136]	train-auc:0.943207	valid-auc:0.760502
[137]	train-auc:0.943673	valid-auc:0.760493
[138]	train-auc:0.944232	valid-auc:0.760486
[139]	train-auc:0.944621	valid-auc:0.760434
[140]	train-auc:0.944982	valid-auc:0.760541
[141]	train-auc:0.945875	valid-auc:0.760572
[142]	train-auc:0.946531	valid-auc:0.760771
[143]	train-auc:0.947094	valid-auc:0.760827
[144]	train-auc:0.947541	valid-auc:0.760814
[145]	train-auc:0.947924	valid-auc:0.76082
[146]	train-auc:0.948754	valid-auc:0.760674
[147]	train-auc:0.949569	valid-auc:0.760738
[148]	train-auc:0.950062	valid-auc:0.760605
[149]	train-auc:0.950646	valid-auc:0.760795
[150]	train-auc:0.951229	valid-auc:0.760879
[151]	train-auc:0.951678	valid-auc:0.760861
[152]	train-auc:0.952031	valid-auc:0.760897
[153]	train-auc:0.952301	valid-auc:0.760751
[154]	train-auc:0.952647	valid-auc:0.760808
[155]	train-auc:0.953036	valid-auc:0.760678
[156]	train-auc:0.953309	valid-auc:0.76076
[157]	train-auc:0.953998	valid-auc:0.760701
[158]	train-auc:0.954402	valid-auc:0.760823
[159]	train-auc:0.954892	valid-auc:0.760761
[160]	train-auc:0.9555	valid-auc:0.760757
[161]	train-auc:0.956353	valid-auc:0.760561
[162]	train-auc:0.956681	valid-auc:0.760472
[163]	train-auc:0.957276	valid-auc:0.760532
[164]	train-auc:0.957483	valid-auc:0.760534
[165]	train-auc:0.957733	valid-auc:0.760468
[166]	train-auc:0.957927	valid-auc:0.760447
[167]	train-auc:0.958177	valid-auc:0.760398
[168]	train-auc:0.958634	valid-auc:0.76026
[169]	train-auc:0.958732	valid-auc:0.760266
[170]	train-auc:0.959039	valid-auc:0.760277
[171]	train-auc:0.959144	valid-auc:0.760302
[172]	train-auc:0.959561	valid-auc:0.760329
[173]	train-auc:0.959891	valid-auc:0.760486
[174]	train-auc:0.960321	valid-auc:0.760417
[175]	train-auc:0.960869	valid-auc:0.760372
[176]	train-auc:0.961221	valid-auc:0.760184
[177]	train-auc:0.961371	valid-auc:0.760117
[178]	train-auc:0.961723	valid-auc:0.760185
[179]	train-auc:0.962291	valid-auc:0.760212
[180]	train-auc:0.962816	valid-auc:0.76011
[181]	train-auc:0.963021	valid-auc:0.760109
[182]	train-auc:0.963368	valid-auc:0.76007
[183]	train-auc:0.963745	valid-auc:0.760105
[184]	train-auc:0.964099	valid-auc:0.760133
[185]	train-auc:0.964388	valid-auc:0.760201
[186]	train-auc:0.964625	valid-auc:0.760177
[187]	train-auc:0.96512	valid-auc:0.760363
[188]	train-auc:0.965428	valid-auc:0.76033
[189]	train-auc:0.965606	valid-auc:0.760312
[190]	train-auc:0.96583	valid-auc:0.760497
[191]	train-auc:0.966499	valid-auc:0.760577
[192]	train-auc:0.966683	valid-auc:0.76065
[193]	train-auc:0.967023	valid-auc:0.760583
[194]	train-auc:0.967481	valid-auc:0.760565
[195]	train-auc:0.967663	valid-auc:0.760563
[196]	train-auc:0.967893	valid-auc:0.760613
[197]	train-auc:0.968463	valid-auc:0.760903
[198]	train-auc:0.968576	valid-auc:0.760913
[199]	train-auc:0.968673	valid-auc:0.76091
[200]	train-auc:0.969049	valid-auc:0.761015
[201]	train-auc:0.969161	valid-auc:0.761092
[202]	train-auc:0.969401	valid-auc:0.761064
[203]	train-auc:0.969705	valid-auc:0.761055
[204]	train-auc:0.970157	valid-auc:0.760909
[205]	train-auc:0.970375	valid-auc:0.760881
[206]	train-auc:0.97089	valid-auc:0.761005
[207]	train-auc:0.971096	valid-auc:0.760966
[208]	train-auc:0.971362	valid-auc:0.761021
[209]	train-auc:0.971889	valid-auc:0.760892
[210]	train-auc:0.972204	valid-auc:0.760842
[211]	train-auc:0.972418	valid-auc:0.760806
[212]	train-auc:0.972822	valid-auc:0.760797
[213]	train-auc:0.973111	valid-auc:0.760793
[214]	train-auc:0.973282	valid-auc:0.760787
[215]	train-auc:0.973507	valid-auc:0.760955
[216]	train-auc:0.97374	valid-auc:0.761041
[217]	train-auc:0.973926	valid-auc:0.761017
[218]	train-auc:0.974273	valid-auc:0.761128
[219]	train-auc:0.974397	valid-auc:0.761209
[220]	train-auc:0.974576	valid-auc:0.761292
[221]	train-auc:0.974643	valid-auc:0.761354
[222]	train-auc:0.974951	valid-auc:0.761197
[223]	train-auc:0.975213	valid-auc:0.761165
[224]	train-auc:0.975258	valid-auc:0.761143
[225]	train-auc:0.975456	valid-auc:0.761147
[226]	train-auc:0.975589	valid-auc:0.761175
[227]	train-auc:0.975867	valid-auc:0.761252
[228]	train-auc:0.976114	valid-auc:0.761176
[229]	train-auc:0.97648	valid-auc:0.761038
[230]	train-auc:0.976776	valid-auc:0.761119
[231]	train-auc:0.976964	valid-auc:0.76104
[232]	train-auc:0.977439	valid-auc:0.761167
[233]	train-auc:0.977645	valid-auc:0.761167
[234]	train-auc:0.97803	valid-auc:0.761373
[235]	train-auc:0.978138	valid-auc:0.761422
[236]	train-auc:0.978269	valid-auc:0.761388
[237]	train-auc:0.978596	valid-auc:0.761363
[238]	train-auc:0.978906	valid-auc:0.761232
[239]	train-auc:0.979029	valid-auc:0.761229
[240]	train-auc:0.979276	valid-auc:0.761274
[241]	train-auc:0.979588	valid-auc:0.761266
[242]	train-auc:0.979781	valid-auc:0.761283
[243]	train-auc:0.979989	valid-auc:0.761246
[244]	train-auc:0.980062	valid-auc:0.761145
[245]	train-auc:0.980295	valid-auc:0.761159
[246]	train-auc:0.980652	valid-auc:0.761118
[247]	train-auc:0.980801	valid-auc:0.761066
[248]	train-auc:0.981028	valid-auc:0.760953
[249]	train-auc:0.981341	valid-auc:0.760934
[250]	train-auc:0.981482	valid-auc:0.760876
[251]	train-auc:0.981784	valid-auc:0.760783
[252]	train-auc:0.981842	valid-auc:0.760801
[253]	train-auc:0.982325	valid-auc:0.760863
[254]	train-auc:0.982404	valid-auc:0.760858
[255]	train-auc:0.982626	valid-auc:0.760835
[256]	train-auc:0.982791	valid-auc:0.760747
[257]	train-auc:0.982941	valid-auc:0.760674
[258]	train-auc:0.983086	valid-auc:0.760759
[259]	train-auc:0.983278	valid-auc:0.760794
[260]	train-auc:0.983395	valid-auc:0.760837
[261]	train-auc:0.983563	valid-auc:0.760871
[262]	train-auc:0.98373	valid-auc:0.760811
[263]	train-auc:0.983785	valid-auc:0.760744
[264]	train-auc:0.98397	valid-auc:0.760745
[265]	train-auc:0.984032	valid-auc:0.760758
[266]	train-auc:0.984088	valid-auc:0.760756
[267]	train-auc:0.98433	valid-auc:0.760639
[268]	train-auc:0.9845	valid-auc:0.760622
[269]	train-auc:0.984713	valid-auc:0.760642
[270]	train-auc:0.984932	valid-auc:0.760625
[271]	train-auc:0.985213	valid-auc:0.760529
[272]	train-auc:0.985406	valid-auc:0.760403
[273]	train-auc:0.985528	valid-auc:0.760382
[274]	train-auc:0.985681	valid-auc:0.760242
[275]	train-auc:0.985872	valid-auc:0.760179
[276]	train-auc:0.986053	valid-auc:0.76035
[277]	train-auc:0.986242	valid-auc:0.760271
[278]	train-auc:0.986393	valid-auc:0.760274
[279]	train-auc:0.986545	valid-auc:0.760199
[280]	train-auc:0.986801	valid-auc:0.7601
[281]	train-auc:0.986994	valid-auc:0.760082
[282]	train-auc:0.987147	valid-auc:0.76008
[283]	train-auc:0.987326	valid-auc:0.760035
[284]	train-auc:0.987545	valid-auc:0.759995
[285]	train-auc:0.987672	valid-auc:0.759865
Stopping. Best iteration:
[235]	train-auc:0.978138	valid-auc:0.761422

[mlcrate] Finished training fold 1 - took 1m07s - running score 0.7543775
[mlcrate] Running fold 2, 62697 train samples, 10450 validation samples
[0]	train-auc:0.791615	valid-auc:0.700609
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.81128	valid-auc:0.718655
[2]	train-auc:0.818699	valid-auc:0.722376
[3]	train-auc:0.824752	valid-auc:0.724552
[4]	train-auc:0.828931	valid-auc:0.727088
[5]	train-auc:0.833067	valid-auc:0.726629
[6]	train-auc:0.835536	valid-auc:0.727204
[7]	train-auc:0.837294	valid-auc:0.727628
[8]	train-auc:0.83937	valid-auc:0.728515
[9]	train-auc:0.842066	valid-auc:0.729551
[10]	train-auc:0.843688	valid-auc:0.730798
[11]	train-auc:0.845321	valid-auc:0.731704
[12]	train-auc:0.84708	valid-auc:0.731763
[13]	train-auc:0.849083	valid-auc:0.731654
[14]	train-auc:0.850531	valid-auc:0.731554
[15]	train-auc:0.851414	valid-auc:0.732091
[16]	train-auc:0.853204	valid-auc:0.731863
[17]	train-auc:0.854369	valid-auc:0.732289
[18]	train-auc:0.855726	valid-auc:0.732519
[19]	train-auc:0.857282	valid-auc:0.73293
[20]	train-auc:0.858869	valid-auc:0.73309
[21]	train-auc:0.86016	valid-auc:0.732975
[22]	train-auc:0.86173	valid-auc:0.733305
[23]	train-auc:0.862924	valid-auc:0.733528
[24]	train-auc:0.864112	valid-auc:0.733809
[25]	train-auc:0.865024	valid-auc:0.733931
[26]	train-auc:0.866281	valid-auc:0.734509
[27]	train-auc:0.867231	valid-auc:0.734645
[28]	train-auc:0.868764	valid-auc:0.734724
[29]	train-auc:0.869846	valid-auc:0.735027
[30]	train-auc:0.871125	valid-auc:0.735457
[31]	train-auc:0.872151	valid-auc:0.736029
[32]	train-auc:0.87329	valid-auc:0.736147
[33]	train-auc:0.874147	valid-auc:0.736223
[34]	train-auc:0.875466	valid-auc:0.736378
[35]	train-auc:0.87638	valid-auc:0.736665
[36]	train-auc:0.87728	valid-auc:0.736686
[37]	train-auc:0.878103	valid-auc:0.736764
[38]	train-auc:0.879007	valid-auc:0.736961
[39]	train-auc:0.879682	valid-auc:0.737122
[40]	train-auc:0.881164	valid-auc:0.737293
[41]	train-auc:0.881898	valid-auc:0.737189
[42]	train-auc:0.882789	valid-auc:0.737473
[43]	train-auc:0.883571	valid-auc:0.737485
[44]	train-auc:0.884735	valid-auc:0.737598
[45]	train-auc:0.885561	valid-auc:0.737568
[46]	train-auc:0.88618	valid-auc:0.73748
[47]	train-auc:0.886778	valid-auc:0.737642
[48]	train-auc:0.887635	valid-auc:0.737854
[49]	train-auc:0.888384	valid-auc:0.737935
[50]	train-auc:0.8892	valid-auc:0.738042
[51]	train-auc:0.890315	valid-auc:0.738287
[52]	train-auc:0.891289	valid-auc:0.738407
[53]	train-auc:0.89223	valid-auc:0.738411
[54]	train-auc:0.893127	valid-auc:0.738505
[55]	train-auc:0.89391	valid-auc:0.738674
[56]	train-auc:0.894998	valid-auc:0.739047
[57]	train-auc:0.895744	valid-auc:0.739091
[58]	train-auc:0.896452	valid-auc:0.73916
[59]	train-auc:0.89725	valid-auc:0.739093
[60]	train-auc:0.897829	valid-auc:0.739182
[61]	train-auc:0.898605	valid-auc:0.739266
[62]	train-auc:0.899282	valid-auc:0.739254
[63]	train-auc:0.90008	valid-auc:0.739265
[64]	train-auc:0.900589	valid-auc:0.739386
[65]	train-auc:0.901323	valid-auc:0.739302
[66]	train-auc:0.902164	valid-auc:0.739311
[67]	train-auc:0.902775	valid-auc:0.739261
[68]	train-auc:0.903906	valid-auc:0.739504
[69]	train-auc:0.904695	valid-auc:0.739514
[70]	train-auc:0.905527	valid-auc:0.739682
[71]	train-auc:0.90639	valid-auc:0.739852
[72]	train-auc:0.90668	valid-auc:0.739811
[73]	train-auc:0.907405	valid-auc:0.739852
[74]	train-auc:0.908329	valid-auc:0.739985
[75]	train-auc:0.908799	valid-auc:0.740035
[76]	train-auc:0.909277	valid-auc:0.740133
[77]	train-auc:0.910243	valid-auc:0.740301
[78]	train-auc:0.911013	valid-auc:0.740442
[79]	train-auc:0.911684	valid-auc:0.740567
[80]	train-auc:0.91223	valid-auc:0.740747
[81]	train-auc:0.912878	valid-auc:0.740865
[82]	train-auc:0.913149	valid-auc:0.740871
[83]	train-auc:0.913572	valid-auc:0.740918
[84]	train-auc:0.91412	valid-auc:0.741137
[85]	train-auc:0.914956	valid-auc:0.741044
[86]	train-auc:0.915429	valid-auc:0.741019
[87]	train-auc:0.916418	valid-auc:0.741219
[88]	train-auc:0.916993	valid-auc:0.741459
[89]	train-auc:0.917605	valid-auc:0.741584
[90]	train-auc:0.918772	valid-auc:0.741611
[91]	train-auc:0.918906	valid-auc:0.741611
[92]	train-auc:0.919754	valid-auc:0.741714
[93]	train-auc:0.920327	valid-auc:0.741702
[94]	train-auc:0.921133	valid-auc:0.741556
[95]	train-auc:0.9215	valid-auc:0.741726
[96]	train-auc:0.922219	valid-auc:0.741899
[97]	train-auc:0.922814	valid-auc:0.742046
[98]	train-auc:0.92347	valid-auc:0.741976
[99]	train-auc:0.924784	valid-auc:0.742239
[100]	train-auc:0.925212	valid-auc:0.74229
[101]	train-auc:0.925321	valid-auc:0.742244
[102]	train-auc:0.925844	valid-auc:0.742172
[103]	train-auc:0.92613	valid-auc:0.742309
[104]	train-auc:0.927039	valid-auc:0.74231
[105]	train-auc:0.92733	valid-auc:0.742351
[106]	train-auc:0.927916	valid-auc:0.742337
[107]	train-auc:0.928504	valid-auc:0.74234
[108]	train-auc:0.928595	valid-auc:0.742326
[109]	train-auc:0.929025	valid-auc:0.742325
[110]	train-auc:0.929374	valid-auc:0.742334
[111]	train-auc:0.92976	valid-auc:0.742366
[112]	train-auc:0.930277	valid-auc:0.742411
[113]	train-auc:0.930363	valid-auc:0.742406
[114]	train-auc:0.930762	valid-auc:0.742386
[115]	train-auc:0.931493	valid-auc:0.742478
[116]	train-auc:0.931795	valid-auc:0.742492
[117]	train-auc:0.932538	valid-auc:0.742514
[118]	train-auc:0.933061	valid-auc:0.742613
[119]	train-auc:0.933786	valid-auc:0.742616
[120]	train-auc:0.934475	valid-auc:0.742862
[121]	train-auc:0.935289	valid-auc:0.743049
[122]	train-auc:0.935435	valid-auc:0.743022
[123]	train-auc:0.935831	valid-auc:0.742978
[124]	train-auc:0.936571	valid-auc:0.74296
[125]	train-auc:0.936628	valid-auc:0.742921
[126]	train-auc:0.937536	valid-auc:0.743079
[127]	train-auc:0.937915	valid-auc:0.743163
[128]	train-auc:0.938174	valid-auc:0.743082
[129]	train-auc:0.938433	valid-auc:0.743079
[130]	train-auc:0.938842	valid-auc:0.743109
[131]	train-auc:0.939444	valid-auc:0.743156
[132]	train-auc:0.940078	valid-auc:0.743162
[133]	train-auc:0.94028	valid-auc:0.743138
[134]	train-auc:0.940783	valid-auc:0.743174
[135]	train-auc:0.941428	valid-auc:0.743228
[136]	train-auc:0.942106	valid-auc:0.743211
[137]	train-auc:0.943312	valid-auc:0.743247
[138]	train-auc:0.94353	valid-auc:0.743257
[139]	train-auc:0.943951	valid-auc:0.74344
[140]	train-auc:0.944466	valid-auc:0.743526
[141]	train-auc:0.945307	valid-auc:0.743736
[142]	train-auc:0.946332	valid-auc:0.743639
[143]	train-auc:0.947102	valid-auc:0.743569
[144]	train-auc:0.94774	valid-auc:0.743602
[145]	train-auc:0.94841	valid-auc:0.743652
[146]	train-auc:0.949006	valid-auc:0.743472
[147]	train-auc:0.949314	valid-auc:0.743481
[148]	train-auc:0.949854	valid-auc:0.743427
[149]	train-auc:0.950227	valid-auc:0.743557
[150]	train-auc:0.950649	valid-auc:0.743596
[151]	train-auc:0.950935	valid-auc:0.743666
[152]	train-auc:0.951114	valid-auc:0.743643
[153]	train-auc:0.951707	valid-auc:0.743627
[154]	train-auc:0.951808	valid-auc:0.743564
[155]	train-auc:0.952099	valid-auc:0.743712
[156]	train-auc:0.952501	valid-auc:0.743742
[157]	train-auc:0.953007	valid-auc:0.743648
[158]	train-auc:0.953181	valid-auc:0.743647
[159]	train-auc:0.953388	valid-auc:0.743617
[160]	train-auc:0.954013	valid-auc:0.743532
[161]	train-auc:0.954658	valid-auc:0.743538
[162]	train-auc:0.955035	valid-auc:0.743501
[163]	train-auc:0.955356	valid-auc:0.743518
[164]	train-auc:0.955803	valid-auc:0.743368
[165]	train-auc:0.956046	valid-auc:0.743373
[166]	train-auc:0.956439	valid-auc:0.743336
[167]	train-auc:0.956874	valid-auc:0.743303
[168]	train-auc:0.957171	valid-auc:0.74339
[169]	train-auc:0.957507	valid-auc:0.743337
[170]	train-auc:0.958118	valid-auc:0.743405
[171]	train-auc:0.95847	valid-auc:0.743306
[172]	train-auc:0.958604	valid-auc:0.743368
[173]	train-auc:0.95891	valid-auc:0.743399
[174]	train-auc:0.959215	valid-auc:0.743439
[175]	train-auc:0.959645	valid-auc:0.74341
[176]	train-auc:0.959927	valid-auc:0.743396
[177]	train-auc:0.960316	valid-auc:0.743479
[178]	train-auc:0.960987	valid-auc:0.743432
[179]	train-auc:0.961435	valid-auc:0.743452
[180]	train-auc:0.961507	valid-auc:0.743445
[181]	train-auc:0.962072	valid-auc:0.743435
[182]	train-auc:0.962121	valid-auc:0.743398
[183]	train-auc:0.962439	valid-auc:0.743278
[184]	train-auc:0.962675	valid-auc:0.743407
[185]	train-auc:0.962838	valid-auc:0.743369
[186]	train-auc:0.963034	valid-auc:0.743317
[187]	train-auc:0.96376	valid-auc:0.743482
[188]	train-auc:0.963901	valid-auc:0.743392
[189]	train-auc:0.964286	valid-auc:0.74338
[190]	train-auc:0.964608	valid-auc:0.743395
[191]	train-auc:0.965181	valid-auc:0.743282
[192]	train-auc:0.965326	valid-auc:0.743256
[193]	train-auc:0.96564	valid-auc:0.743283
[194]	train-auc:0.966013	valid-auc:0.743327
[195]	train-auc:0.966185	valid-auc:0.743445
[196]	train-auc:0.96654	valid-auc:0.743395
[197]	train-auc:0.966962	valid-auc:0.743504
[198]	train-auc:0.967053	valid-auc:0.743441
[199]	train-auc:0.967435	valid-auc:0.743409
[200]	train-auc:0.968022	valid-auc:0.743241
[201]	train-auc:0.968347	valid-auc:0.743276
[202]	train-auc:0.968653	valid-auc:0.743262
[203]	train-auc:0.96928	valid-auc:0.743238
[204]	train-auc:0.969363	valid-auc:0.743247
[205]	train-auc:0.969503	valid-auc:0.743182
[206]	train-auc:0.969714	valid-auc:0.743176
Stopping. Best iteration:
[156]	train-auc:0.952501	valid-auc:0.743742

[mlcrate] Finished training fold 2 - took 52s - running score 0.7508323333333333
[mlcrate] Running fold 3, 62697 train samples, 10450 validation samples
[0]	train-auc:0.788233	valid-auc:0.709202
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.808358	valid-auc:0.722429
[2]	train-auc:0.816283	valid-auc:0.72803
[3]	train-auc:0.823826	valid-auc:0.731367
[4]	train-auc:0.827151	valid-auc:0.734216
[5]	train-auc:0.830592	valid-auc:0.735201
[6]	train-auc:0.833325	valid-auc:0.736334
[7]	train-auc:0.836124	valid-auc:0.736955
[8]	train-auc:0.838241	valid-auc:0.737022
[9]	train-auc:0.839737	valid-auc:0.738124
[10]	train-auc:0.841755	valid-auc:0.738315
[11]	train-auc:0.843134	valid-auc:0.738929
[12]	train-auc:0.844598	valid-auc:0.739639
[13]	train-auc:0.846316	valid-auc:0.738863
[14]	train-auc:0.848035	valid-auc:0.738877
[15]	train-auc:0.849273	valid-auc:0.738705
[16]	train-auc:0.850534	valid-auc:0.739135
[17]	train-auc:0.852275	valid-auc:0.739285
[18]	train-auc:0.853581	valid-auc:0.73966
[19]	train-auc:0.854783	valid-auc:0.740072
[20]	train-auc:0.85595	valid-auc:0.740054
[21]	train-auc:0.857832	valid-auc:0.74062
[22]	train-auc:0.859005	valid-auc:0.740718
[23]	train-auc:0.860508	valid-auc:0.740819
[24]	train-auc:0.861756	valid-auc:0.740899
[25]	train-auc:0.863105	valid-auc:0.741048
[26]	train-auc:0.863996	valid-auc:0.7412
[27]	train-auc:0.864912	valid-auc:0.741398
[28]	train-auc:0.865843	valid-auc:0.741649
[29]	train-auc:0.866862	valid-auc:0.741757
[30]	train-auc:0.867848	valid-auc:0.741806
[31]	train-auc:0.869232	valid-auc:0.742196
[32]	train-auc:0.870849	valid-auc:0.742129
[33]	train-auc:0.871689	valid-auc:0.742027
[34]	train-auc:0.872541	valid-auc:0.74206
[35]	train-auc:0.87385	valid-auc:0.742548
[36]	train-auc:0.874725	valid-auc:0.742493
[37]	train-auc:0.87592	valid-auc:0.742623
[38]	train-auc:0.877112	valid-auc:0.74291
[39]	train-auc:0.878107	valid-auc:0.743209
[40]	train-auc:0.87916	valid-auc:0.743588
[41]	train-auc:0.880619	valid-auc:0.743915
[42]	train-auc:0.881309	valid-auc:0.744008
[43]	train-auc:0.882343	valid-auc:0.74435
[44]	train-auc:0.883425	valid-auc:0.744517
[45]	train-auc:0.884445	valid-auc:0.744636
[46]	train-auc:0.885199	valid-auc:0.744936
[47]	train-auc:0.886421	valid-auc:0.744859
[48]	train-auc:0.887401	valid-auc:0.745088
[49]	train-auc:0.888312	valid-auc:0.745227
[50]	train-auc:0.888934	valid-auc:0.745197
[51]	train-auc:0.890024	valid-auc:0.745292
[52]	train-auc:0.890839	valid-auc:0.745364
[53]	train-auc:0.891523	valid-auc:0.745329
[54]	train-auc:0.892527	valid-auc:0.745584
[55]	train-auc:0.893009	valid-auc:0.745707
[56]	train-auc:0.894365	valid-auc:0.74591
[57]	train-auc:0.895327	valid-auc:0.745992
[58]	train-auc:0.896003	valid-auc:0.746006
[59]	train-auc:0.896969	valid-auc:0.746215
[60]	train-auc:0.898093	valid-auc:0.746089
[61]	train-auc:0.898545	valid-auc:0.746107
[62]	train-auc:0.899096	valid-auc:0.746212
[63]	train-auc:0.899553	valid-auc:0.746176
[64]	train-auc:0.899946	valid-auc:0.746026
[65]	train-auc:0.900263	valid-auc:0.745986
[66]	train-auc:0.900893	valid-auc:0.746162
[67]	train-auc:0.901531	valid-auc:0.746156
[68]	train-auc:0.902118	valid-auc:0.746223
[69]	train-auc:0.903118	valid-auc:0.746248
[70]	train-auc:0.903625	valid-auc:0.746398
[71]	train-auc:0.904195	valid-auc:0.746503
[72]	train-auc:0.905089	valid-auc:0.746791
[73]	train-auc:0.90559	valid-auc:0.746783
[74]	train-auc:0.906117	valid-auc:0.746937
[75]	train-auc:0.90653	valid-auc:0.747018
[76]	train-auc:0.907213	valid-auc:0.747067
[77]	train-auc:0.907676	valid-auc:0.747047
[78]	train-auc:0.908304	valid-auc:0.747196
[79]	train-auc:0.909008	valid-auc:0.747164
[80]	train-auc:0.90924	valid-auc:0.747194
[81]	train-auc:0.909741	valid-auc:0.747197
[82]	train-auc:0.910981	valid-auc:0.747418
[83]	train-auc:0.911652	valid-auc:0.747483
[84]	train-auc:0.912689	valid-auc:0.747619
[85]	train-auc:0.913126	valid-auc:0.747652
[86]	train-auc:0.91349	valid-auc:0.747755
[87]	train-auc:0.91433	valid-auc:0.748079
[88]	train-auc:0.914861	valid-auc:0.748155
[89]	train-auc:0.915592	valid-auc:0.748177
[90]	train-auc:0.916551	valid-auc:0.748199
[91]	train-auc:0.91678	valid-auc:0.748239
[92]	train-auc:0.91721	valid-auc:0.748309
[93]	train-auc:0.917723	valid-auc:0.748262
[94]	train-auc:0.918503	valid-auc:0.748462
[95]	train-auc:0.918861	valid-auc:0.74842
[96]	train-auc:0.919002	valid-auc:0.748383
[97]	train-auc:0.919751	valid-auc:0.748502
[98]	train-auc:0.920636	valid-auc:0.748575
[99]	train-auc:0.921608	valid-auc:0.74863
[100]	train-auc:0.92191	valid-auc:0.748616
[101]	train-auc:0.922244	valid-auc:0.748755
[102]	train-auc:0.922641	valid-auc:0.748933
[103]	train-auc:0.9228	valid-auc:0.748929
[104]	train-auc:0.923319	valid-auc:0.749046
[105]	train-auc:0.923495	valid-auc:0.749018
[106]	train-auc:0.92394	valid-auc:0.748964
[107]	train-auc:0.925093	valid-auc:0.749422
[108]	train-auc:0.925556	valid-auc:0.749463
[109]	train-auc:0.926127	valid-auc:0.749359
[110]	train-auc:0.927204	valid-auc:0.749421
[111]	train-auc:0.927794	valid-auc:0.749265
[112]	train-auc:0.92797	valid-auc:0.749246
[113]	train-auc:0.928579	valid-auc:0.749247
[114]	train-auc:0.928805	valid-auc:0.749263
[115]	train-auc:0.929321	valid-auc:0.749255
[116]	train-auc:0.929768	valid-auc:0.749087
[117]	train-auc:0.930262	valid-auc:0.74922
[118]	train-auc:0.930776	valid-auc:0.749216
[119]	train-auc:0.931053	valid-auc:0.749179
[120]	train-auc:0.931779	valid-auc:0.74928
[121]	train-auc:0.932213	valid-auc:0.749442
[122]	train-auc:0.932765	valid-auc:0.749455
[123]	train-auc:0.933104	valid-auc:0.749471
[124]	train-auc:0.933539	valid-auc:0.749503
[125]	train-auc:0.93391	valid-auc:0.749434
[126]	train-auc:0.934368	valid-auc:0.749477
[127]	train-auc:0.935094	valid-auc:0.749513
[128]	train-auc:0.935582	valid-auc:0.749728
[129]	train-auc:0.936097	valid-auc:0.749752
[130]	train-auc:0.936391	valid-auc:0.749743
[131]	train-auc:0.936758	valid-auc:0.749738
[132]	train-auc:0.937003	valid-auc:0.749724
[133]	train-auc:0.937024	valid-auc:0.749731
[134]	train-auc:0.937884	valid-auc:0.749764
[135]	train-auc:0.938145	valid-auc:0.749734
[136]	train-auc:0.938985	valid-auc:0.749703
[137]	train-auc:0.939494	valid-auc:0.749558
[138]	train-auc:0.93987	valid-auc:0.749702
[139]	train-auc:0.940366	valid-auc:0.749749
[140]	train-auc:0.940981	valid-auc:0.749907
[141]	train-auc:0.941384	valid-auc:0.749997
[142]	train-auc:0.941775	valid-auc:0.750108
[143]	train-auc:0.942519	valid-auc:0.750112
[144]	train-auc:0.942692	valid-auc:0.750166
[145]	train-auc:0.943109	valid-auc:0.750312
[146]	train-auc:0.943431	valid-auc:0.750415
[147]	train-auc:0.943895	valid-auc:0.750527
[148]	train-auc:0.944314	valid-auc:0.750512
[149]	train-auc:0.944611	valid-auc:0.75059
[150]	train-auc:0.945204	valid-auc:0.750711
[151]	train-auc:0.945404	valid-auc:0.750688
[152]	train-auc:0.945905	valid-auc:0.75052
[153]	train-auc:0.946033	valid-auc:0.750507
[154]	train-auc:0.946354	valid-auc:0.750476
[155]	train-auc:0.946574	valid-auc:0.75056
[156]	train-auc:0.947052	valid-auc:0.75068
[157]	train-auc:0.947327	valid-auc:0.750706
[158]	train-auc:0.947828	valid-auc:0.750718
[159]	train-auc:0.948078	valid-auc:0.750613
[160]	train-auc:0.948374	valid-auc:0.750539
[161]	train-auc:0.948912	valid-auc:0.750756
[162]	train-auc:0.94956	valid-auc:0.750752
[163]	train-auc:0.949898	valid-auc:0.750797
[164]	train-auc:0.950166	valid-auc:0.750763
[165]	train-auc:0.95061	valid-auc:0.750748
[166]	train-auc:0.95106	valid-auc:0.750705
[167]	train-auc:0.951494	valid-auc:0.750781
[168]	train-auc:0.95159	valid-auc:0.750782
[169]	train-auc:0.952062	valid-auc:0.750865
[170]	train-auc:0.952869	valid-auc:0.750823
[171]	train-auc:0.95324	valid-auc:0.750969
[172]	train-auc:0.953816	valid-auc:0.751105
[173]	train-auc:0.954331	valid-auc:0.75105
[174]	train-auc:0.954806	valid-auc:0.751137
[175]	train-auc:0.955069	valid-auc:0.751077
[176]	train-auc:0.955587	valid-auc:0.75106
[177]	train-auc:0.95568	valid-auc:0.751093
[178]	train-auc:0.956168	valid-auc:0.751141
[179]	train-auc:0.956547	valid-auc:0.751174
[180]	train-auc:0.956762	valid-auc:0.751138
[181]	train-auc:0.956917	valid-auc:0.751095
[182]	train-auc:0.957184	valid-auc:0.751028
[183]	train-auc:0.957568	valid-auc:0.751119
[184]	train-auc:0.958196	valid-auc:0.750942
[185]	train-auc:0.958421	valid-auc:0.750959
[186]	train-auc:0.959018	valid-auc:0.750835
[187]	train-auc:0.959295	valid-auc:0.750992
[188]	train-auc:0.959691	valid-auc:0.751023
[189]	train-auc:0.959899	valid-auc:0.751048
[190]	train-auc:0.960055	valid-auc:0.751012
[191]	train-auc:0.960399	valid-auc:0.75096
[192]	train-auc:0.960679	valid-auc:0.751041
[193]	train-auc:0.960802	valid-auc:0.751088
[194]	train-auc:0.961574	valid-auc:0.751414
[195]	train-auc:0.96186	valid-auc:0.751439
[196]	train-auc:0.962041	valid-auc:0.751493
[197]	train-auc:0.962534	valid-auc:0.751668
[198]	train-auc:0.963025	valid-auc:0.751611
[199]	train-auc:0.963185	valid-auc:0.751517
[200]	train-auc:0.963567	valid-auc:0.75142
[201]	train-auc:0.963735	valid-auc:0.751531
[202]	train-auc:0.964158	valid-auc:0.751439
[203]	train-auc:0.96474	valid-auc:0.751496
[204]	train-auc:0.965107	valid-auc:0.751494
[205]	train-auc:0.965415	valid-auc:0.75148
[206]	train-auc:0.965975	valid-auc:0.751579
[207]	train-auc:0.966346	valid-auc:0.751644
[208]	train-auc:0.966828	valid-auc:0.751683
[209]	train-auc:0.967017	valid-auc:0.751717
[210]	train-auc:0.967055	valid-auc:0.751714
[211]	train-auc:0.967245	valid-auc:0.751721
[212]	train-auc:0.967513	valid-auc:0.751724
[213]	train-auc:0.967906	valid-auc:0.751801
[214]	train-auc:0.968291	valid-auc:0.751777
[215]	train-auc:0.968529	valid-auc:0.751727
[216]	train-auc:0.968689	valid-auc:0.751706
[217]	train-auc:0.969015	valid-auc:0.751636
[218]	train-auc:0.969352	valid-auc:0.751599
[219]	train-auc:0.969669	valid-auc:0.751682
[220]	train-auc:0.969956	valid-auc:0.751768
[221]	train-auc:0.970321	valid-auc:0.751656
[222]	train-auc:0.97069	valid-auc:0.751642
[223]	train-auc:0.970992	valid-auc:0.751487
[224]	train-auc:0.97137	valid-auc:0.751503
[225]	train-auc:0.97184	valid-auc:0.751517
[226]	train-auc:0.972039	valid-auc:0.751525
[227]	train-auc:0.9725	valid-auc:0.751497
[228]	train-auc:0.972735	valid-auc:0.751403
[229]	train-auc:0.97309	valid-auc:0.751514
[230]	train-auc:0.973465	valid-auc:0.751393
[231]	train-auc:0.973748	valid-auc:0.751332
[232]	train-auc:0.973875	valid-auc:0.751459
[233]	train-auc:0.973916	valid-auc:0.751439
[234]	train-auc:0.974165	valid-auc:0.751562
[235]	train-auc:0.974351	valid-auc:0.751532
[236]	train-auc:0.97451	valid-auc:0.751584
[237]	train-auc:0.974933	valid-auc:0.751448
[238]	train-auc:0.975168	valid-auc:0.751466
[239]	train-auc:0.975551	valid-auc:0.751288
[240]	train-auc:0.975768	valid-auc:0.75129
[241]	train-auc:0.976132	valid-auc:0.75128
[242]	train-auc:0.976292	valid-auc:0.751303
[243]	train-auc:0.976447	valid-auc:0.751323
[244]	train-auc:0.976718	valid-auc:0.751234
[245]	train-auc:0.976926	valid-auc:0.751153
[246]	train-auc:0.977053	valid-auc:0.751058
[247]	train-auc:0.977335	valid-auc:0.75097
[248]	train-auc:0.977603	valid-auc:0.751045
[249]	train-auc:0.977886	valid-auc:0.751071
[250]	train-auc:0.978187	valid-auc:0.751095
[251]	train-auc:0.978511	valid-auc:0.751233
[252]	train-auc:0.978706	valid-auc:0.751271
[253]	train-auc:0.978858	valid-auc:0.751257
[254]	train-auc:0.979075	valid-auc:0.751308
[255]	train-auc:0.979129	valid-auc:0.751321
[256]	train-auc:0.979616	valid-auc:0.751438
[257]	train-auc:0.979891	valid-auc:0.751525
[258]	train-auc:0.979963	valid-auc:0.751544
[259]	train-auc:0.980242	valid-auc:0.751658
[260]	train-auc:0.980409	valid-auc:0.751782
[261]	train-auc:0.980506	valid-auc:0.751799
[262]	train-auc:0.980789	valid-auc:0.751723
[263]	train-auc:0.980988	valid-auc:0.751666
Stopping. Best iteration:
[213]	train-auc:0.967906	valid-auc:0.751801

[mlcrate] Finished training fold 3 - took 59s - running score 0.7510745
[mlcrate] Running fold 4, 62697 train samples, 10450 validation samples
[0]	train-auc:0.78648	valid-auc:0.710739
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.808375	valid-auc:0.725779
[2]	train-auc:0.815924	valid-auc:0.732458
[3]	train-auc:0.822773	valid-auc:0.736246
[4]	train-auc:0.826606	valid-auc:0.737715
[5]	train-auc:0.829312	valid-auc:0.73979
[6]	train-auc:0.832341	valid-auc:0.741209
[7]	train-auc:0.835544	valid-auc:0.742223
[8]	train-auc:0.837431	valid-auc:0.742844
[9]	train-auc:0.840242	valid-auc:0.743405
[10]	train-auc:0.842556	valid-auc:0.74425
[11]	train-auc:0.844171	valid-auc:0.745
[12]	train-auc:0.845886	valid-auc:0.745217
[13]	train-auc:0.847747	valid-auc:0.745895
[14]	train-auc:0.849473	valid-auc:0.746926
[15]	train-auc:0.850646	valid-auc:0.747038
[16]	train-auc:0.851939	valid-auc:0.747669
[17]	train-auc:0.853473	valid-auc:0.747671
[18]	train-auc:0.855097	valid-auc:0.747962
[19]	train-auc:0.857117	valid-auc:0.748443
[20]	train-auc:0.858578	valid-auc:0.748409
[21]	train-auc:0.859177	valid-auc:0.748939
[22]	train-auc:0.860438	valid-auc:0.749266
[23]	train-auc:0.861628	valid-auc:0.750155
[24]	train-auc:0.862866	valid-auc:0.750607
[25]	train-auc:0.864058	valid-auc:0.750882
[26]	train-auc:0.865185	valid-auc:0.751038
[27]	train-auc:0.866596	valid-auc:0.751052
[28]	train-auc:0.867897	valid-auc:0.751324
[29]	train-auc:0.869453	valid-auc:0.751354
[30]	train-auc:0.870698	valid-auc:0.751327
[31]	train-auc:0.871773	valid-auc:0.751404
[32]	train-auc:0.872994	valid-auc:0.751679
[33]	train-auc:0.873756	valid-auc:0.75187
[34]	train-auc:0.875171	valid-auc:0.752205
[35]	train-auc:0.875985	valid-auc:0.75224
[36]	train-auc:0.877149	valid-auc:0.752441
[37]	train-auc:0.878363	valid-auc:0.752184
[38]	train-auc:0.87923	valid-auc:0.752337
[39]	train-auc:0.88028	valid-auc:0.75257
[40]	train-auc:0.881196	valid-auc:0.752577
[41]	train-auc:0.881985	valid-auc:0.752558
[42]	train-auc:0.8829	valid-auc:0.752693
[43]	train-auc:0.883771	valid-auc:0.752748
[44]	train-auc:0.884905	valid-auc:0.75304
[45]	train-auc:0.885746	valid-auc:0.753084
[46]	train-auc:0.886622	valid-auc:0.753024
[47]	train-auc:0.887401	valid-auc:0.752984
[48]	train-auc:0.888416	valid-auc:0.752982
[49]	train-auc:0.889401	valid-auc:0.752989
[50]	train-auc:0.889883	valid-auc:0.753106
[51]	train-auc:0.891255	valid-auc:0.753275
[52]	train-auc:0.8924	valid-auc:0.753321
[53]	train-auc:0.893526	valid-auc:0.753412
[54]	train-auc:0.894547	valid-auc:0.753443
[55]	train-auc:0.895253	valid-auc:0.753471
[56]	train-auc:0.896452	valid-auc:0.753675
[57]	train-auc:0.897053	valid-auc:0.753945
[58]	train-auc:0.897752	valid-auc:0.753949
[59]	train-auc:0.898453	valid-auc:0.754075
[60]	train-auc:0.899175	valid-auc:0.754094
[61]	train-auc:0.900052	valid-auc:0.754108
[62]	train-auc:0.900519	valid-auc:0.754156
[63]	train-auc:0.90126	valid-auc:0.754245
[64]	train-auc:0.901675	valid-auc:0.754307
[65]	train-auc:0.902018	valid-auc:0.754343
[66]	train-auc:0.902896	valid-auc:0.754842
[67]	train-auc:0.90415	valid-auc:0.754866
[68]	train-auc:0.905154	valid-auc:0.754773
[69]	train-auc:0.905839	valid-auc:0.75511
[70]	train-auc:0.906595	valid-auc:0.755269
[71]	train-auc:0.907578	valid-auc:0.755516
[72]	train-auc:0.908044	valid-auc:0.7556
[73]	train-auc:0.908551	valid-auc:0.755734
[74]	train-auc:0.908991	valid-auc:0.755822
[75]	train-auc:0.909807	valid-auc:0.756098
[76]	train-auc:0.91042	valid-auc:0.756164
[77]	train-auc:0.910939	valid-auc:0.756301
[78]	train-auc:0.911555	valid-auc:0.756346
[79]	train-auc:0.911831	valid-auc:0.756293
[80]	train-auc:0.913057	valid-auc:0.756459
[81]	train-auc:0.913663	valid-auc:0.756596
[82]	train-auc:0.914364	valid-auc:0.75665
[83]	train-auc:0.914798	valid-auc:0.756825
[84]	train-auc:0.91577	valid-auc:0.756783
[85]	train-auc:0.916347	valid-auc:0.756717
[86]	train-auc:0.916753	valid-auc:0.756748
[87]	train-auc:0.916952	valid-auc:0.756879
[88]	train-auc:0.91746	valid-auc:0.757109
[89]	train-auc:0.917593	valid-auc:0.757094
[90]	train-auc:0.917866	valid-auc:0.757044
[91]	train-auc:0.918491	valid-auc:0.756949
[92]	train-auc:0.919046	valid-auc:0.756954
[93]	train-auc:0.919419	valid-auc:0.757093
[94]	train-auc:0.919745	valid-auc:0.757126
[95]	train-auc:0.920152	valid-auc:0.757144
[96]	train-auc:0.920628	valid-auc:0.757156
[97]	train-auc:0.920769	valid-auc:0.757154
[98]	train-auc:0.920974	valid-auc:0.757199
[99]	train-auc:0.921528	valid-auc:0.757157
[100]	train-auc:0.922185	valid-auc:0.757152
[101]	train-auc:0.922569	valid-auc:0.757071
[102]	train-auc:0.923257	valid-auc:0.757361
[103]	train-auc:0.924322	valid-auc:0.757244
[104]	train-auc:0.924526	valid-auc:0.757302
[105]	train-auc:0.924962	valid-auc:0.757381
[106]	train-auc:0.925209	valid-auc:0.757427
[107]	train-auc:0.925515	valid-auc:0.75749
[108]	train-auc:0.926611	valid-auc:0.757499
[109]	train-auc:0.926922	valid-auc:0.757522
[110]	train-auc:0.927077	valid-auc:0.757515
[111]	train-auc:0.927307	valid-auc:0.757553
[112]	train-auc:0.928195	valid-auc:0.757509
[113]	train-auc:0.928397	valid-auc:0.757411
[114]	train-auc:0.929684	valid-auc:0.757556
[115]	train-auc:0.930485	valid-auc:0.757696
[116]	train-auc:0.930648	valid-auc:0.757776
[117]	train-auc:0.931309	valid-auc:0.757822
[118]	train-auc:0.931536	valid-auc:0.757822
[119]	train-auc:0.93183	valid-auc:0.757865
[120]	train-auc:0.932119	valid-auc:0.75782
[121]	train-auc:0.933154	valid-auc:0.757985
[122]	train-auc:0.93387	valid-auc:0.758108
[123]	train-auc:0.93466	valid-auc:0.758142
[124]	train-auc:0.935341	valid-auc:0.758362
[125]	train-auc:0.935589	valid-auc:0.758478
[126]	train-auc:0.935975	valid-auc:0.758631
[127]	train-auc:0.936374	valid-auc:0.758609
[128]	train-auc:0.936959	valid-auc:0.758646
[129]	train-auc:0.937118	valid-auc:0.758661
[130]	train-auc:0.937507	valid-auc:0.758751
[131]	train-auc:0.938043	valid-auc:0.75879
[132]	train-auc:0.938518	valid-auc:0.758859
[133]	train-auc:0.939058	valid-auc:0.758808
[134]	train-auc:0.940058	valid-auc:0.758997
[135]	train-auc:0.940637	valid-auc:0.759034
[136]	train-auc:0.941128	valid-auc:0.759091
[137]	train-auc:0.942043	valid-auc:0.759089
[138]	train-auc:0.942412	valid-auc:0.759163
[139]	train-auc:0.942759	valid-auc:0.759189
[140]	train-auc:0.943019	valid-auc:0.759182
[141]	train-auc:0.943326	valid-auc:0.759143
[142]	train-auc:0.944055	valid-auc:0.759218
[143]	train-auc:0.944855	valid-auc:0.759287
[144]	train-auc:0.945139	valid-auc:0.759233
[145]	train-auc:0.945405	valid-auc:0.759276
[146]	train-auc:0.945882	valid-auc:0.759176
[147]	train-auc:0.946201	valid-auc:0.759164
[148]	train-auc:0.946647	valid-auc:0.759042
[149]	train-auc:0.946942	valid-auc:0.759055
[150]	train-auc:0.94748	valid-auc:0.759278
[151]	train-auc:0.947777	valid-auc:0.759375
[152]	train-auc:0.948096	valid-auc:0.759582
[153]	train-auc:0.948517	valid-auc:0.759625
[154]	train-auc:0.949267	valid-auc:0.759718
[155]	train-auc:0.949661	valid-auc:0.759668
[156]	train-auc:0.950124	valid-auc:0.759749
[157]	train-auc:0.950507	valid-auc:0.759826
[158]	train-auc:0.951087	valid-auc:0.759831
[159]	train-auc:0.951421	valid-auc:0.759819
[160]	train-auc:0.951981	valid-auc:0.759924
[161]	train-auc:0.952808	valid-auc:0.759903
[162]	train-auc:0.953078	valid-auc:0.759899
[163]	train-auc:0.953337	valid-auc:0.759917
[164]	train-auc:0.953653	valid-auc:0.759873
[165]	train-auc:0.954233	valid-auc:0.75999
[166]	train-auc:0.954746	valid-auc:0.760071
[167]	train-auc:0.9552	valid-auc:0.759964
[168]	train-auc:0.955472	valid-auc:0.759903
[169]	train-auc:0.95567	valid-auc:0.759916
[170]	train-auc:0.956073	valid-auc:0.759902
[171]	train-auc:0.956238	valid-auc:0.759835
[172]	train-auc:0.956356	valid-auc:0.759912
[173]	train-auc:0.95686	valid-auc:0.759941
[174]	train-auc:0.957389	valid-auc:0.760156
[175]	train-auc:0.957879	valid-auc:0.760269
[176]	train-auc:0.958115	valid-auc:0.760262
[177]	train-auc:0.958616	valid-auc:0.760241
[178]	train-auc:0.959157	valid-auc:0.760254
[179]	train-auc:0.959677	valid-auc:0.760276
[180]	train-auc:0.960037	valid-auc:0.760178
[181]	train-auc:0.960522	valid-auc:0.760195
[182]	train-auc:0.960684	valid-auc:0.76022
[183]	train-auc:0.961632	valid-auc:0.760393
[184]	train-auc:0.962183	valid-auc:0.760441
[185]	train-auc:0.962347	valid-auc:0.760461
[186]	train-auc:0.962522	valid-auc:0.760417
[187]	train-auc:0.962787	valid-auc:0.760363
[188]	train-auc:0.96334	valid-auc:0.76044
[189]	train-auc:0.963584	valid-auc:0.760424
[190]	train-auc:0.963903	valid-auc:0.760425
[191]	train-auc:0.963974	valid-auc:0.76038
[192]	train-auc:0.96433	valid-auc:0.760416
[193]	train-auc:0.964509	valid-auc:0.760483
[194]	train-auc:0.96502	valid-auc:0.760493
[195]	train-auc:0.96524	valid-auc:0.760445
[196]	train-auc:0.965811	valid-auc:0.76036
[197]	train-auc:0.966191	valid-auc:0.760469
[198]	train-auc:0.966651	valid-auc:0.760506
[199]	train-auc:0.967002	valid-auc:0.760494
[200]	train-auc:0.967373	valid-auc:0.760526
[201]	train-auc:0.967495	valid-auc:0.760539
[202]	train-auc:0.967966	valid-auc:0.760395
[203]	train-auc:0.968138	valid-auc:0.760333
[204]	train-auc:0.968574	valid-auc:0.760306
[205]	train-auc:0.969039	valid-auc:0.760308
[206]	train-auc:0.969197	valid-auc:0.760324
[207]	train-auc:0.969422	valid-auc:0.760229
[208]	train-auc:0.969654	valid-auc:0.760243
[209]	train-auc:0.969742	valid-auc:0.76024
[210]	train-auc:0.970202	valid-auc:0.760244
[211]	train-auc:0.97043	valid-auc:0.760193
[212]	train-auc:0.970554	valid-auc:0.760193
[213]	train-auc:0.970979	valid-auc:0.760306
[214]	train-auc:0.971289	valid-auc:0.76035
[215]	train-auc:0.971458	valid-auc:0.760288
[216]	train-auc:0.971763	valid-auc:0.76045
[217]	train-auc:0.972089	valid-auc:0.760382
[218]	train-auc:0.972267	valid-auc:0.76044
[219]	train-auc:0.972358	valid-auc:0.760457
[220]	train-auc:0.972474	valid-auc:0.760501
[221]	train-auc:0.972604	valid-auc:0.760488
[222]	train-auc:0.972788	valid-auc:0.760548
[223]	train-auc:0.973153	valid-auc:0.760567
[224]	train-auc:0.973569	valid-auc:0.760509
[225]	train-auc:0.973789	valid-auc:0.760554
[226]	train-auc:0.973817	valid-auc:0.760541
[227]	train-auc:0.974163	valid-auc:0.760596
[228]	train-auc:0.974425	valid-auc:0.760748
[229]	train-auc:0.974566	valid-auc:0.760746
[230]	train-auc:0.974778	valid-auc:0.760804
[231]	train-auc:0.975063	valid-auc:0.760758
[232]	train-auc:0.975392	valid-auc:0.760817
[233]	train-auc:0.975629	valid-auc:0.76087
[234]	train-auc:0.975726	valid-auc:0.760847
[235]	train-auc:0.975942	valid-auc:0.760832
[236]	train-auc:0.976101	valid-auc:0.760813
[237]	train-auc:0.976497	valid-auc:0.760875
[238]	train-auc:0.976867	valid-auc:0.76082
[239]	train-auc:0.976979	valid-auc:0.76077
[240]	train-auc:0.977109	valid-auc:0.760779
[241]	train-auc:0.977374	valid-auc:0.760753
[242]	train-auc:0.977527	valid-auc:0.760635
[243]	train-auc:0.977662	valid-auc:0.760608
[244]	train-auc:0.978137	valid-auc:0.760493
[245]	train-auc:0.978627	valid-auc:0.760364
[246]	train-auc:0.978848	valid-auc:0.760324
[247]	train-auc:0.979057	valid-auc:0.760374
[248]	train-auc:0.979245	valid-auc:0.760393
[249]	train-auc:0.979468	valid-auc:0.760367
[250]	train-auc:0.979657	valid-auc:0.76038
[251]	train-auc:0.979759	valid-auc:0.760424
[252]	train-auc:0.979978	valid-auc:0.760432
[253]	train-auc:0.980308	valid-auc:0.760525
[254]	train-auc:0.980587	valid-auc:0.760321
[255]	train-auc:0.980719	valid-auc:0.760314
[256]	train-auc:0.981054	valid-auc:0.760483
[257]	train-auc:0.981189	valid-auc:0.760523
[258]	train-auc:0.981389	valid-auc:0.760533
[259]	train-auc:0.981547	valid-auc:0.760509
[260]	train-auc:0.981867	valid-auc:0.760414
[261]	train-auc:0.98207	valid-auc:0.760333
[262]	train-auc:0.982263	valid-auc:0.760391
[263]	train-auc:0.98248	valid-auc:0.760357
[264]	train-auc:0.982623	valid-auc:0.760338
[265]	train-auc:0.982686	valid-auc:0.760422
[266]	train-auc:0.982876	valid-auc:0.760375
[267]	train-auc:0.983005	valid-auc:0.760353
[268]	train-auc:0.983202	valid-auc:0.760393
[269]	train-auc:0.983242	valid-auc:0.760334
[270]	train-auc:0.983344	valid-auc:0.760343
[271]	train-auc:0.983467	valid-auc:0.760285
[272]	train-auc:0.983571	valid-auc:0.760221
[273]	train-auc:0.983812	valid-auc:0.760112
[274]	train-auc:0.983946	valid-auc:0.760133
[275]	train-auc:0.984	valid-auc:0.760183
[276]	train-auc:0.984189	valid-auc:0.760103
[277]	train-auc:0.984336	valid-auc:0.760098
[278]	train-auc:0.984416	valid-auc:0.759994
[279]	train-auc:0.984484	valid-auc:0.759935
[280]	train-auc:0.984753	valid-auc:0.759904
[281]	train-auc:0.984852	valid-auc:0.759908
[282]	train-auc:0.985081	valid-auc:0.759841
[283]	train-auc:0.985311	valid-auc:0.759851
[284]	train-auc:0.985465	valid-auc:0.759923
[285]	train-auc:0.985545	valid-auc:0.759916
[286]	train-auc:0.985623	valid-auc:0.759836
[287]	train-auc:0.985727	valid-auc:0.759862
Stopping. Best iteration:
[237]	train-auc:0.976497	valid-auc:0.760875

[mlcrate] Finished training fold 4 - took 1m06s - running score 0.7530346
[mlcrate] Running fold 5, 62698 train samples, 10449 validation samples
[0]	train-auc:0.789441	valid-auc:0.706847
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.810015	valid-auc:0.722529
[2]	train-auc:0.819567	valid-auc:0.727926
[3]	train-auc:0.824641	valid-auc:0.730233
[4]	train-auc:0.827786	valid-auc:0.731287
[5]	train-auc:0.831378	valid-auc:0.731344
[6]	train-auc:0.833799	valid-auc:0.731674
[7]	train-auc:0.83608	valid-auc:0.732668
[8]	train-auc:0.838455	valid-auc:0.733878
[9]	train-auc:0.84052	valid-auc:0.734472
[10]	train-auc:0.842169	valid-auc:0.735716
[11]	train-auc:0.843787	valid-auc:0.735957
[12]	train-auc:0.844904	valid-auc:0.736206
[13]	train-auc:0.846642	valid-auc:0.736167
[14]	train-auc:0.848419	valid-auc:0.736169
[15]	train-auc:0.849771	valid-auc:0.736206
[16]	train-auc:0.850973	valid-auc:0.735834
[17]	train-auc:0.852701	valid-auc:0.736523
[18]	train-auc:0.853787	valid-auc:0.737161
[19]	train-auc:0.855652	valid-auc:0.737548
[20]	train-auc:0.857809	valid-auc:0.737963
[21]	train-auc:0.859172	valid-auc:0.737944
[22]	train-auc:0.860133	valid-auc:0.738213
[23]	train-auc:0.860936	valid-auc:0.738208
[24]	train-auc:0.862178	valid-auc:0.738181
[25]	train-auc:0.863063	valid-auc:0.738661
[26]	train-auc:0.864378	valid-auc:0.739129
[27]	train-auc:0.865714	valid-auc:0.739378
[28]	train-auc:0.867173	valid-auc:0.739741
[29]	train-auc:0.868402	valid-auc:0.739819
[30]	train-auc:0.869507	valid-auc:0.739972
[31]	train-auc:0.870712	valid-auc:0.739961
[32]	train-auc:0.871586	valid-auc:0.74003
[33]	train-auc:0.872537	valid-auc:0.740099
[34]	train-auc:0.873476	valid-auc:0.740324
[35]	train-auc:0.874705	valid-auc:0.740325
[36]	train-auc:0.876076	valid-auc:0.740357
[37]	train-auc:0.877096	valid-auc:0.740588
[38]	train-auc:0.878217	valid-auc:0.740707
[39]	train-auc:0.879508	valid-auc:0.740554
[40]	train-auc:0.880189	valid-auc:0.740735
[41]	train-auc:0.881578	valid-auc:0.7409
[42]	train-auc:0.882904	valid-auc:0.741028
[43]	train-auc:0.884062	valid-auc:0.74104
[44]	train-auc:0.884868	valid-auc:0.741103
[45]	train-auc:0.885794	valid-auc:0.741108
[46]	train-auc:0.88656	valid-auc:0.740947
[47]	train-auc:0.887294	valid-auc:0.741047
[48]	train-auc:0.888392	valid-auc:0.7412
[49]	train-auc:0.889007	valid-auc:0.741292
[50]	train-auc:0.890104	valid-auc:0.741604
[51]	train-auc:0.891122	valid-auc:0.741537
[52]	train-auc:0.891792	valid-auc:0.741679
[53]	train-auc:0.892672	valid-auc:0.741762
[54]	train-auc:0.893557	valid-auc:0.741735
[55]	train-auc:0.895055	valid-auc:0.742192
[56]	train-auc:0.895791	valid-auc:0.742357
[57]	train-auc:0.896614	valid-auc:0.742432
[58]	train-auc:0.897293	valid-auc:0.742386
[59]	train-auc:0.897773	valid-auc:0.742368
[60]	train-auc:0.898489	valid-auc:0.74227
[61]	train-auc:0.899012	valid-auc:0.742159
[62]	train-auc:0.899513	valid-auc:0.742115
[63]	train-auc:0.900264	valid-auc:0.742095
[64]	train-auc:0.900959	valid-auc:0.742282
[65]	train-auc:0.90148	valid-auc:0.742396
[66]	train-auc:0.902121	valid-auc:0.742411
[67]	train-auc:0.902612	valid-auc:0.742679
[68]	train-auc:0.903729	valid-auc:0.742849
[69]	train-auc:0.904555	valid-auc:0.742875
[70]	train-auc:0.904973	valid-auc:0.742955
[71]	train-auc:0.905649	valid-auc:0.743008
[72]	train-auc:0.906314	valid-auc:0.743112
[73]	train-auc:0.907166	valid-auc:0.74322
[74]	train-auc:0.907415	valid-auc:0.743309
[75]	train-auc:0.908213	valid-auc:0.743369
[76]	train-auc:0.908586	valid-auc:0.743398
[77]	train-auc:0.90918	valid-auc:0.74351
[78]	train-auc:0.909984	valid-auc:0.743452
[79]	train-auc:0.910865	valid-auc:0.743506
[80]	train-auc:0.911149	valid-auc:0.743466
[81]	train-auc:0.911826	valid-auc:0.743608
[82]	train-auc:0.912483	valid-auc:0.74353
[83]	train-auc:0.912942	valid-auc:0.743739
[84]	train-auc:0.913611	valid-auc:0.743828
[85]	train-auc:0.914139	valid-auc:0.74391
[86]	train-auc:0.914739	valid-auc:0.743881
[87]	train-auc:0.915494	valid-auc:0.744009
[88]	train-auc:0.915647	valid-auc:0.743909
[89]	train-auc:0.915812	valid-auc:0.743921
[90]	train-auc:0.916262	valid-auc:0.74382
[91]	train-auc:0.91684	valid-auc:0.743937
[92]	train-auc:0.917387	valid-auc:0.744034
[93]	train-auc:0.917644	valid-auc:0.744012
[94]	train-auc:0.918615	valid-auc:0.744013
[95]	train-auc:0.919242	valid-auc:0.744098
[96]	train-auc:0.919865	valid-auc:0.744209
[97]	train-auc:0.92022	valid-auc:0.744169
[98]	train-auc:0.92037	valid-auc:0.744208
[99]	train-auc:0.921139	valid-auc:0.744205
[100]	train-auc:0.921714	valid-auc:0.744141
[101]	train-auc:0.922177	valid-auc:0.744304
[102]	train-auc:0.922577	valid-auc:0.74434
[103]	train-auc:0.922982	valid-auc:0.744335
[104]	train-auc:0.923644	valid-auc:0.744412
[105]	train-auc:0.92448	valid-auc:0.744361
[106]	train-auc:0.925596	valid-auc:0.744509
[107]	train-auc:0.925965	valid-auc:0.744535
[108]	train-auc:0.926125	valid-auc:0.744628
[109]	train-auc:0.926355	valid-auc:0.744644
[110]	train-auc:0.926931	valid-auc:0.744872
[111]	train-auc:0.927285	valid-auc:0.745017
[112]	train-auc:0.927677	valid-auc:0.745135
[113]	train-auc:0.928492	valid-auc:0.745151
[114]	train-auc:0.929684	valid-auc:0.745426
[115]	train-auc:0.929886	valid-auc:0.745419
[116]	train-auc:0.930132	valid-auc:0.745418
[117]	train-auc:0.930377	valid-auc:0.745446
[118]	train-auc:0.930932	valid-auc:0.745449
[119]	train-auc:0.931026	valid-auc:0.74541
[120]	train-auc:0.931701	valid-auc:0.745433
[121]	train-auc:0.931851	valid-auc:0.745501
[122]	train-auc:0.93218	valid-auc:0.745533
[123]	train-auc:0.93233	valid-auc:0.745514
[124]	train-auc:0.932539	valid-auc:0.745566
[125]	train-auc:0.932783	valid-auc:0.745725
[126]	train-auc:0.933175	valid-auc:0.745759
[127]	train-auc:0.933743	valid-auc:0.745854
[128]	train-auc:0.934197	valid-auc:0.745922
[129]	train-auc:0.934637	valid-auc:0.745998
[130]	train-auc:0.935506	valid-auc:0.746204
[131]	train-auc:0.93601	valid-auc:0.74634
[132]	train-auc:0.936509	valid-auc:0.746388
[133]	train-auc:0.93714	valid-auc:0.746308
[134]	train-auc:0.937287	valid-auc:0.746234
[135]	train-auc:0.937793	valid-auc:0.746227
[136]	train-auc:0.938504	valid-auc:0.746318
[137]	train-auc:0.938853	valid-auc:0.746313
[138]	train-auc:0.939039	valid-auc:0.746328
[139]	train-auc:0.939317	valid-auc:0.746394
[140]	train-auc:0.939691	valid-auc:0.74636
[141]	train-auc:0.940376	valid-auc:0.746425
[142]	train-auc:0.940786	valid-auc:0.746548
[143]	train-auc:0.941027	valid-auc:0.746591
[144]	train-auc:0.942139	valid-auc:0.746537
[145]	train-auc:0.942716	valid-auc:0.746543
[146]	train-auc:0.943103	valid-auc:0.746576
[147]	train-auc:0.943614	valid-auc:0.746755
[148]	train-auc:0.944327	valid-auc:0.746953
[149]	train-auc:0.944634	valid-auc:0.747023
[150]	train-auc:0.944967	valid-auc:0.747138
[151]	train-auc:0.945042	valid-auc:0.747117
[152]	train-auc:0.945345	valid-auc:0.747128
[153]	train-auc:0.94558	valid-auc:0.747198
[154]	train-auc:0.945733	valid-auc:0.747168
[155]	train-auc:0.946344	valid-auc:0.747162
[156]	train-auc:0.947057	valid-auc:0.747328
[157]	train-auc:0.947117	valid-auc:0.747358
[158]	train-auc:0.947339	valid-auc:0.747323
[159]	train-auc:0.94806	valid-auc:0.747521
[160]	train-auc:0.948475	valid-auc:0.747583
[161]	train-auc:0.949119	valid-auc:0.74774
[162]	train-auc:0.949626	valid-auc:0.747745
[163]	train-auc:0.949859	valid-auc:0.747709
[164]	train-auc:0.950433	valid-auc:0.747704
[165]	train-auc:0.95095	valid-auc:0.747737
[166]	train-auc:0.951285	valid-auc:0.747737
[167]	train-auc:0.951634	valid-auc:0.747711
[168]	train-auc:0.951975	valid-auc:0.747601
[169]	train-auc:0.952439	valid-auc:0.747538
[170]	train-auc:0.953121	valid-auc:0.747593
[171]	train-auc:0.953483	valid-auc:0.747757
[172]	train-auc:0.953644	valid-auc:0.747815
[173]	train-auc:0.953961	valid-auc:0.747837
[174]	train-auc:0.954147	valid-auc:0.747902
[175]	train-auc:0.954522	valid-auc:0.747819
[176]	train-auc:0.954745	valid-auc:0.747962
[177]	train-auc:0.955053	valid-auc:0.748158
[178]	train-auc:0.9553	valid-auc:0.748378
[179]	train-auc:0.956015	valid-auc:0.748269
[180]	train-auc:0.956492	valid-auc:0.74831
[181]	train-auc:0.95675	valid-auc:0.748341
[182]	train-auc:0.957018	valid-auc:0.748374
[183]	train-auc:0.957185	valid-auc:0.748318
[184]	train-auc:0.957716	valid-auc:0.748263
[185]	train-auc:0.958049	valid-auc:0.748283
[186]	train-auc:0.958713	valid-auc:0.748581
[187]	train-auc:0.958885	valid-auc:0.748623
[188]	train-auc:0.959361	valid-auc:0.748699
[189]	train-auc:0.959874	valid-auc:0.748565
[190]	train-auc:0.960159	valid-auc:0.748488
[191]	train-auc:0.960228	valid-auc:0.74846
[192]	train-auc:0.960671	valid-auc:0.748458
[193]	train-auc:0.961349	valid-auc:0.748566
[194]	train-auc:0.961727	valid-auc:0.748457
[195]	train-auc:0.962173	valid-auc:0.748458
[196]	train-auc:0.962651	valid-auc:0.748411
[197]	train-auc:0.962836	valid-auc:0.748421
[198]	train-auc:0.963731	valid-auc:0.748543
[199]	train-auc:0.963786	valid-auc:0.748564
[200]	train-auc:0.964129	valid-auc:0.748677
[201]	train-auc:0.964433	valid-auc:0.748718
[202]	train-auc:0.964724	valid-auc:0.74871
[203]	train-auc:0.965048	valid-auc:0.748646
[204]	train-auc:0.965384	valid-auc:0.748575
[205]	train-auc:0.965715	valid-auc:0.748596
[206]	train-auc:0.965903	valid-auc:0.748638
[207]	train-auc:0.966201	valid-auc:0.748636
[208]	train-auc:0.966625	valid-auc:0.748677
[209]	train-auc:0.966981	valid-auc:0.748745
[210]	train-auc:0.967375	valid-auc:0.748696
[211]	train-auc:0.96771	valid-auc:0.748703
[212]	train-auc:0.968004	valid-auc:0.748824
[213]	train-auc:0.968633	valid-auc:0.748817
[214]	train-auc:0.969039	valid-auc:0.748942
[215]	train-auc:0.969145	valid-auc:0.748916
[216]	train-auc:0.969268	valid-auc:0.748841
[217]	train-auc:0.96966	valid-auc:0.748807
[218]	train-auc:0.969755	valid-auc:0.748804
[219]	train-auc:0.969797	valid-auc:0.748818
[220]	train-auc:0.970075	valid-auc:0.748763
[221]	train-auc:0.970207	valid-auc:0.748749
[222]	train-auc:0.970471	valid-auc:0.748659
[223]	train-auc:0.970568	valid-auc:0.748648
[224]	train-auc:0.970931	valid-auc:0.748668
[225]	train-auc:0.971473	valid-auc:0.748673
[226]	train-auc:0.971982	valid-auc:0.748792
[227]	train-auc:0.97225	valid-auc:0.748866
[228]	train-auc:0.972302	valid-auc:0.748886
[229]	train-auc:0.972495	valid-auc:0.748771
[230]	train-auc:0.972731	valid-auc:0.748804
[231]	train-auc:0.972963	valid-auc:0.748675
[232]	train-auc:0.973041	valid-auc:0.748672
[233]	train-auc:0.973292	valid-auc:0.748871
[234]	train-auc:0.973654	valid-auc:0.748916
[235]	train-auc:0.973692	valid-auc:0.748927
[236]	train-auc:0.973933	valid-auc:0.748896
[237]	train-auc:0.974132	valid-auc:0.748975
[238]	train-auc:0.974314	valid-auc:0.749027
[239]	train-auc:0.974498	valid-auc:0.748829
[240]	train-auc:0.974844	valid-auc:0.748715
[241]	train-auc:0.975144	valid-auc:0.74872
[242]	train-auc:0.975535	valid-auc:0.748756
[243]	train-auc:0.975754	valid-auc:0.748781
[244]	train-auc:0.976073	valid-auc:0.748827
[245]	train-auc:0.976304	valid-auc:0.748767
[246]	train-auc:0.976427	valid-auc:0.748779
[247]	train-auc:0.976668	valid-auc:0.748799
[248]	train-auc:0.977105	valid-auc:0.748891
[249]	train-auc:0.977158	valid-auc:0.748944
[250]	train-auc:0.977235	valid-auc:0.748895
[251]	train-auc:0.977506	valid-auc:0.748752
[252]	train-auc:0.977628	valid-auc:0.748781
[253]	train-auc:0.977862	valid-auc:0.748906
[254]	train-auc:0.977924	valid-auc:0.748948
[255]	train-auc:0.978123	valid-auc:0.748925
[256]	train-auc:0.978234	valid-auc:0.74895
[257]	train-auc:0.978553	valid-auc:0.749018
[258]	train-auc:0.978838	valid-auc:0.748965
[259]	train-auc:0.979064	valid-auc:0.748929
[260]	train-auc:0.979227	valid-auc:0.74895
[261]	train-auc:0.97949	valid-auc:0.748919
[262]	train-auc:0.979668	valid-auc:0.748973
[263]	train-auc:0.979994	valid-auc:0.7489
[264]	train-auc:0.980213	valid-auc:0.748882
[265]	train-auc:0.980458	valid-auc:0.74894
[266]	train-auc:0.980626	valid-auc:0.74901
[267]	train-auc:0.980676	valid-auc:0.749016
[268]	train-auc:0.9808	valid-auc:0.74906
[269]	train-auc:0.980986	valid-auc:0.748982
[270]	train-auc:0.981187	valid-auc:0.748984
[271]	train-auc:0.981273	valid-auc:0.749001
[272]	train-auc:0.981485	valid-auc:0.749009
[273]	train-auc:0.981538	valid-auc:0.748954
[274]	train-auc:0.981675	valid-auc:0.748934
[275]	train-auc:0.981949	valid-auc:0.74889
[276]	train-auc:0.982149	valid-auc:0.749007
[277]	train-auc:0.982414	valid-auc:0.749059
[278]	train-auc:0.982668	valid-auc:0.74907
[279]	train-auc:0.982773	valid-auc:0.749013
[280]	train-auc:0.982886	valid-auc:0.749004
[281]	train-auc:0.983127	valid-auc:0.748972
[282]	train-auc:0.983407	valid-auc:0.74895
[283]	train-auc:0.98359	valid-auc:0.749
[284]	train-auc:0.983783	valid-auc:0.749032
[285]	train-auc:0.983972	valid-auc:0.749078
[286]	train-auc:0.984214	valid-auc:0.749096
[287]	train-auc:0.984474	valid-auc:0.749134
[288]	train-auc:0.984524	valid-auc:0.74913
[289]	train-auc:0.984616	valid-auc:0.749148
[290]	train-auc:0.984828	valid-auc:0.749113
[291]	train-auc:0.984933	valid-auc:0.749053
[292]	train-auc:0.985045	valid-auc:0.749001
[293]	train-auc:0.985325	valid-auc:0.749022
[294]	train-auc:0.985464	valid-auc:0.749026
[295]	train-auc:0.985683	valid-auc:0.749162
[296]	train-auc:0.985869	valid-auc:0.749214
[297]	train-auc:0.986017	valid-auc:0.749257
[298]	train-auc:0.986142	valid-auc:0.749179
[299]	train-auc:0.9863	valid-auc:0.749163
[300]	train-auc:0.986434	valid-auc:0.749147
[301]	train-auc:0.986555	valid-auc:0.749054
[302]	train-auc:0.986747	valid-auc:0.749145
[303]	train-auc:0.986926	valid-auc:0.749167
[304]	train-auc:0.986986	valid-auc:0.749139
[305]	train-auc:0.98712	valid-auc:0.749267
[306]	train-auc:0.987171	valid-auc:0.749318
[307]	train-auc:0.98724	valid-auc:0.749317
[308]	train-auc:0.987409	valid-auc:0.749343
[309]	train-auc:0.987518	valid-auc:0.749344
[310]	train-auc:0.987617	valid-auc:0.749335
[311]	train-auc:0.98777	valid-auc:0.749392
[312]	train-auc:0.987878	valid-auc:0.749298
[313]	train-auc:0.987993	valid-auc:0.749225
[314]	train-auc:0.988175	valid-auc:0.749121
[315]	train-auc:0.988261	valid-auc:0.749128
[316]	train-auc:0.988536	valid-auc:0.749148
[317]	train-auc:0.988626	valid-auc:0.749204
[318]	train-auc:0.988957	valid-auc:0.74927
[319]	train-auc:0.989148	valid-auc:0.749388
[320]	train-auc:0.989197	valid-auc:0.749346
[321]	train-auc:0.989231	valid-auc:0.74934
[322]	train-auc:0.9893	valid-auc:0.749346
[323]	train-auc:0.989402	valid-auc:0.749318
[324]	train-auc:0.989406	valid-auc:0.749306
[325]	train-auc:0.989493	valid-auc:0.749327
[326]	train-auc:0.989626	valid-auc:0.749426
[327]	train-auc:0.989763	valid-auc:0.749439
[328]	train-auc:0.989978	valid-auc:0.749578
[329]	train-auc:0.990052	valid-auc:0.749644
[330]	train-auc:0.990079	valid-auc:0.749649
[331]	train-auc:0.99016	valid-auc:0.7496
[332]	train-auc:0.990199	valid-auc:0.749662
[333]	train-auc:0.990292	valid-auc:0.749703
[334]	train-auc:0.990381	valid-auc:0.749633
[335]	train-auc:0.990397	valid-auc:0.74961
[336]	train-auc:0.990525	valid-auc:0.74949
[337]	train-auc:0.990616	valid-auc:0.749496
[338]	train-auc:0.990699	valid-auc:0.749471
[339]	train-auc:0.99077	valid-auc:0.749397
[340]	train-auc:0.990836	valid-auc:0.749384
[341]	train-auc:0.990925	valid-auc:0.749386
[342]	train-auc:0.991011	valid-auc:0.749295
[343]	train-auc:0.991086	valid-auc:0.749324
[344]	train-auc:0.991144	valid-auc:0.749356
[345]	train-auc:0.99128	valid-auc:0.749414
[346]	train-auc:0.991441	valid-auc:0.749325
[347]	train-auc:0.991559	valid-auc:0.749255
[348]	train-auc:0.991635	valid-auc:0.749204
[349]	train-auc:0.991815	valid-auc:0.749219
[350]	train-auc:0.991913	valid-auc:0.749313
[351]	train-auc:0.991951	valid-auc:0.749355
[352]	train-auc:0.992034	valid-auc:0.749376
[353]	train-auc:0.992092	valid-auc:0.749309
[354]	train-auc:0.992208	valid-auc:0.749295
[355]	train-auc:0.992284	valid-auc:0.749298
[356]	train-auc:0.992407	valid-auc:0.74922
[357]	train-auc:0.992449	valid-auc:0.749226
[358]	train-auc:0.992491	valid-auc:0.749162
[359]	train-auc:0.992566	valid-auc:0.749045
[360]	train-auc:0.992671	valid-auc:0.749008
[361]	train-auc:0.992831	valid-auc:0.748805
[362]	train-auc:0.992918	valid-auc:0.748743
[363]	train-auc:0.992993	valid-auc:0.748823
[364]	train-auc:0.993077	valid-auc:0.748714
[365]	train-auc:0.993196	valid-auc:0.748862
[366]	train-auc:0.993292	valid-auc:0.748834
[367]	train-auc:0.99335	valid-auc:0.748915
[368]	train-auc:0.993432	valid-auc:0.748939
[369]	train-auc:0.993435	valid-auc:0.748928
[370]	train-auc:0.993467	valid-auc:0.74894
[371]	train-auc:0.993538	valid-auc:0.748883
[372]	train-auc:0.993594	valid-auc:0.748908
[373]	train-auc:0.993681	valid-auc:0.748926
[374]	train-auc:0.993742	valid-auc:0.748884
[375]	train-auc:0.993827	valid-auc:0.748852
[376]	train-auc:0.993915	valid-auc:0.748903
[377]	train-auc:0.993967	valid-auc:0.748924
[378]	train-auc:0.994012	valid-auc:0.748894
[379]	train-auc:0.994073	valid-auc:0.748943
[380]	train-auc:0.994168	valid-auc:0.748916
[381]	train-auc:0.994216	valid-auc:0.748802
[382]	train-auc:0.994246	valid-auc:0.748757
[383]	train-auc:0.994278	valid-auc:0.748801
Stopping. Best iteration:
[333]	train-auc:0.990292	valid-auc:0.749703

[mlcrate] Finished training fold 5 - took 1m27s - running score 0.7524793333333334
[mlcrate] Running fold 6, 62699 train samples, 10448 validation samples
[0]	train-auc:0.789036	valid-auc:0.715571
Multiple eval metrics have been passed: 'valid-auc' will be used for early stopping.

Will train until valid-auc hasn't improved in 50 rounds.
[1]	train-auc:0.809961	valid-auc:0.725421
[2]	train-auc:0.818555	valid-auc:0.733701
[3]	train-auc:0.823251	valid-auc:0.737898
[4]	train-auc:0.827406	valid-auc:0.739996
[5]	train-auc:0.830234	valid-auc:0.740876
[6]	train-auc:0.833019	valid-auc:0.742203
[7]	train-auc:0.835359	valid-auc:0.742536
[8]	train-auc:0.837443	valid-auc:0.743964
[9]	train-auc:0.83989	valid-auc:0.744304
[10]	train-auc:0.841056	valid-auc:0.745086
[11]	train-auc:0.842262	valid-auc:0.745343
[12]	train-auc:0.844033	valid-auc:0.745649
[13]	train-auc:0.845055	valid-auc:0.746013
[14]	train-auc:0.846656	valid-auc:0.746593
[15]	train-auc:0.848289	valid-auc:0.746855
[16]	train-auc:0.849862	valid-auc:0.747258
[17]	train-auc:0.85152	valid-auc:0.747558
[18]	train-auc:0.852972	valid-auc:0.748176
[19]	train-auc:0.854345	valid-auc:0.748654
[20]	train-auc:0.855677	valid-auc:0.748734
[21]	train-auc:0.856872	valid-auc:0.748677
[22]	train-auc:0.858363	valid-auc:0.748939
[23]	train-auc:0.85989	valid-auc:0.749187
[24]	train-auc:0.86119	valid-auc:0.749108
[25]	train-auc:0.861996	valid-auc:0.74921
[26]	train-auc:0.862953	valid-auc:0.749335
[27]	train-auc:0.863997	valid-auc:0.749466
[28]	train-auc:0.865206	valid-auc:0.749729
[29]	train-auc:0.866524	valid-auc:0.749983
[30]	train-auc:0.86764	valid-auc:0.750197
[31]	train-auc:0.869213	valid-auc:0.750545
[32]	train-auc:0.870427	valid-auc:0.75083
[33]	train-auc:0.871484	valid-auc:0.751037
[34]	train-auc:0.872611	valid-auc:0.751104
[35]	train-auc:0.874272	valid-auc:0.751214
[36]	train-auc:0.875472	valid-auc:0.751446
[37]	train-auc:0.87653	valid-auc:0.751703
[38]	train-auc:0.877488	valid-auc:0.75178
[39]	train-auc:0.878401	valid-auc:0.751719
[40]	train-auc:0.879333	valid-auc:0.751835
[41]	train-auc:0.880365	valid-auc:0.75207
[42]	train-auc:0.881817	valid-auc:0.752086
[43]	train-auc:0.882724	valid-auc:0.752197
[44]	train-auc:0.883815	valid-auc:0.752251
[45]	train-auc:0.884681	valid-auc:0.75233
[46]	train-auc:0.885533	valid-auc:0.752576
[47]	train-auc:0.886667	valid-auc:0.752468
[48]	train-auc:0.887795	valid-auc:0.752549
[49]	train-auc:0.888727	valid-auc:0.752612
[50]	train-auc:0.889305	valid-auc:0.752636
[51]	train-auc:0.890077	valid-auc:0.752687
[52]	train-auc:0.891342	valid-auc:0.752724
[53]	train-auc:0.892145	valid-auc:0.752755
[54]	train-auc:0.892795	valid-auc:0.752814
[55]	train-auc:0.893261	valid-auc:0.75286
[56]	train-auc:0.893845	valid-auc:0.752945
[57]	train-auc:0.894818	valid-auc:0.752716
[58]	train-auc:0.895236	valid-auc:0.752651
[59]	train-auc:0.895953	valid-auc:0.752646
[60]	train-auc:0.896533	valid-auc:0.752837
[61]	train-auc:0.897091	valid-auc:0.752794
[62]	train-auc:0.897612	valid-auc:0.752827
[63]	train-auc:0.898888	valid-auc:0.753075
[64]	train-auc:0.899831	valid-auc:0.753108
[65]	train-auc:0.900498	valid-auc:0.753197
[66]	train-auc:0.901159	valid-auc:0.753192
[67]	train-auc:0.901676	valid-auc:0.753224
[68]	train-auc:0.902663	valid-auc:0.753325
[69]	train-auc:0.903184	valid-auc:0.753403
[70]	train-auc:0.904179	valid-auc:0.753334
[71]	train-auc:0.904519	valid-auc:0.753402
[72]	train-auc:0.905089	valid-auc:0.753291
[73]	train-auc:0.905907	valid-auc:0.753653
[74]	train-auc:0.906749	valid-auc:0.75407
[75]	train-auc:0.907884	valid-auc:0.754142
[76]	train-auc:0.908353	valid-auc:0.754191
[77]	train-auc:0.908783	valid-auc:0.754338
[78]	train-auc:0.909167	valid-auc:0.754446
[79]	train-auc:0.909955	valid-auc:0.754303
[80]	train-auc:0.910532	valid-auc:0.754466
[81]	train-auc:0.910875	valid-auc:0.754449
[82]	train-auc:0.911225	valid-auc:0.754505
[83]	train-auc:0.911952	valid-auc:0.754494
[84]	train-auc:0.912887	valid-auc:0.754465
[85]	train-auc:0.913659	valid-auc:0.754569
[86]	train-auc:0.91438	valid-auc:0.754609
[87]	train-auc:0.914942	valid-auc:0.754619
[88]	train-auc:0.915556	valid-auc:0.754596
[89]	train-auc:0.915997	valid-auc:0.754529
[90]	train-auc:0.916206	valid-auc:0.754545
[91]	train-auc:0.916714	valid-auc:0.754642
[92]	train-auc:0.917161	valid-auc:0.754743
[93]	train-auc:0.918228	valid-auc:0.754799
[94]	train-auc:0.918599	valid-auc:0.754779
[95]	train-auc:0.919284	valid-auc:0.7549
[96]	train-auc:0.919696	valid-auc:0.755009
[97]	train-auc:0.921003	valid-auc:0.75494
[98]	train-auc:0.921407	valid-auc:0.754945
[99]	train-auc:0.922076	valid-auc:0.755038
[100]	train-auc:0.922783	valid-auc:0.755095
[101]	train-auc:0.923262	valid-auc:0.755046
[102]	train-auc:0.923559	valid-auc:0.755035
[103]	train-auc:0.924053	valid-auc:0.755005
[104]	train-auc:0.924525	valid-auc:0.755004
[105]	train-auc:0.925016	valid-auc:0.755092
[106]	train-auc:0.925591	valid-auc:0.755116
[107]	train-auc:0.925745	valid-auc:0.755067
[108]	train-auc:0.926079	valid-auc:0.75503
[109]	train-auc:0.926656	valid-auc:0.755067
[110]	train-auc:0.92727	valid-auc:0.755138
[111]	train-auc:0.927757	valid-auc:0.7553
[112]	train-auc:0.928534	valid-auc:0.755373
[113]	train-auc:0.929011	valid-auc:0.755385
[114]	train-auc:0.929285	valid-auc:0.755321
[115]	train-auc:0.930142	valid-auc:0.755584
[116]	train-auc:0.930383	valid-auc:0.755489
[117]	train-auc:0.930721	valid-auc:0.755408
[118]	train-auc:0.931278	valid-auc:0.75541
[119]	train-auc:0.93195	valid-auc:0.755468
[120]	train-auc:0.932746	valid-auc:0.755656
[121]	train-auc:0.933114	valid-auc:0.75576
[122]	train-auc:0.933977	valid-auc:0.756079
[123]	train-auc:0.934123	valid-auc:0.756074
[124]	train-auc:0.934464	valid-auc:0.756019
[125]	train-auc:0.935021	valid-auc:0.755945
[126]	train-auc:0.9355	valid-auc:0.755914
[127]	train-auc:0.936138	valid-auc:0.755886
[128]	train-auc:0.937291	valid-auc:0.756039
[129]	train-auc:0.93765	valid-auc:0.756011
[130]	train-auc:0.93805	valid-auc:0.755974
[131]	train-auc:0.938856	valid-auc:0.756222
[132]	train-auc:0.939568	valid-auc:0.756099
[133]	train-auc:0.939851	valid-auc:0.756147
[134]	train-auc:0.940766	valid-auc:0.756347
[135]	train-auc:0.94114	valid-auc:0.756372
[136]	train-auc:0.941435	valid-auc:0.756385
[137]	train-auc:0.94175	valid-auc:0.756473
[138]	train-auc:0.942242	valid-auc:0.75651
[139]	train-auc:0.942553	valid-auc:0.756618
[140]	train-auc:0.943015	valid-auc:0.756607
[141]	train-auc:0.943649	valid-auc:0.756618
[142]	train-auc:0.944012	valid-auc:0.756617
[143]	train-auc:0.944754	valid-auc:0.756428
[144]	train-auc:0.945556	valid-auc:0.756615
[145]	train-auc:0.945766	valid-auc:0.756773
[146]	train-auc:0.946295	valid-auc:0.756839
[147]	train-auc:0.946545	valid-auc:0.756797
[148]	train-auc:0.94681	valid-auc:0.756866
[149]	train-auc:0.947063	valid-auc:0.756817
[150]	train-auc:0.947205	valid-auc:0.756854
[151]	train-auc:0.947312	valid-auc:0.75681
[152]	train-auc:0.947974	valid-auc:0.756644
[153]	train-auc:0.948563	valid-auc:0.756765
[154]	train-auc:0.949197	valid-auc:0.756916
[155]	train-auc:0.949564	valid-auc:0.756935
[156]	train-auc:0.949829	valid-auc:0.756799
[157]	train-auc:0.950766	valid-auc:0.756628
[158]	train-auc:0.951621	valid-auc:0.756635
[159]	train-auc:0.952278	valid-auc:0.75692
[160]	train-auc:0.952612	valid-auc:0.756985
[161]	train-auc:0.953103	valid-auc:0.756934
[162]	train-auc:0.953507	valid-auc:0.757185
[163]	train-auc:0.95389	valid-auc:0.757363
[164]	train-auc:0.954489	valid-auc:0.757497
[165]	train-auc:0.955041	valid-auc:0.757436
[166]	train-auc:0.9559	valid-auc:0.75771
[167]	train-auc:0.955979	valid-auc:0.757698
[168]	train-auc:0.956312	valid-auc:0.757778
[169]	train-auc:0.956569	valid-auc:0.757836
[170]	train-auc:0.957083	valid-auc:0.757777
[171]	train-auc:0.957432	valid-auc:0.757662
[172]	train-auc:0.957728	valid-auc:0.757593
[173]	train-auc:0.958023	valid-auc:0.757551
[174]	train-auc:0.95817	valid-auc:0.757578
[175]	train-auc:0.958676	valid-auc:0.757687
[176]	train-auc:0.95882	valid-auc:0.757645
[177]	train-auc:0.95932	valid-auc:0.757604
[178]	train-auc:0.95952	valid-auc:0.757543
[179]	train-auc:0.959842	valid-auc:0.757526
[180]	train-auc:0.960309	valid-auc:0.757527
[181]	train-auc:0.960604	valid-auc:0.757531
[182]	train-auc:0.961069	valid-auc:0.757523
[183]	train-auc:0.961222	valid-auc:0.757498
[184]	train-auc:0.961344	valid-auc:0.757493
[185]	train-auc:0.961634	valid-auc:0.757652
[186]	train-auc:0.962034	valid-auc:0.757637
[187]	train-auc:0.962316	valid-auc:0.757661
[188]	train-auc:0.962542	valid-auc:0.757677
[189]	train-auc:0.962921	valid-auc:0.757864
[190]	train-auc:0.963024	valid-auc:0.757913
[191]	train-auc:0.963478	valid-auc:0.757979
[192]	train-auc:0.96377	valid-auc:0.757867
[193]	train-auc:0.963834	valid-auc:0.757848
[194]	train-auc:0.964248	valid-auc:0.757851
[195]	train-auc:0.964642	valid-auc:0.757929
[196]	train-auc:0.964989	valid-auc:0.757725
[197]	train-auc:0.965253	valid-auc:0.757728
[198]	train-auc:0.965382	valid-auc:0.757706
[199]	train-auc:0.965498	valid-auc:0.757736
[200]	train-auc:0.965819	valid-auc:0.757729
[201]	train-auc:0.966219	valid-auc:0.757683
[202]	train-auc:0.966791	valid-auc:0.757711
[203]	train-auc:0.966989	valid-auc:0.757804
[204]	train-auc:0.967416	valid-auc:0.757699
[205]	train-auc:0.967584	valid-auc:0.757708
[206]	train-auc:0.968053	valid-auc:0.757765
[207]	train-auc:0.968432	valid-auc:0.757635
[208]	train-auc:0.968662	valid-auc:0.757576
[209]	train-auc:0.968773	valid-auc:0.757551
[210]	train-auc:0.969208	valid-auc:0.757667
[211]	train-auc:0.969502	valid-auc:0.757593
[212]	train-auc:0.969722	valid-auc:0.757539
[213]	train-auc:0.969894	valid-auc:0.757557
[214]	train-auc:0.970278	valid-auc:0.757753
[215]	train-auc:0.970549	valid-auc:0.757688
[216]	train-auc:0.97071	valid-auc:0.75766
[217]	train-auc:0.970824	valid-auc:0.757699
[218]	train-auc:0.970962	valid-auc:0.757732
[219]	train-auc:0.971032	valid-auc:0.757752
[220]	train-auc:0.971536	valid-auc:0.757596
[221]	train-auc:0.971591	valid-auc:0.757618
[222]	train-auc:0.971733	valid-auc:0.757677
[223]	train-auc:0.972118	valid-auc:0.757693
[224]	train-auc:0.972253	valid-auc:0.757603
[225]	train-auc:0.972351	valid-auc:0.757612
[226]	train-auc:0.972523	valid-auc:0.757579
[227]	train-auc:0.972839	valid-auc:0.757655
[228]	train-auc:0.973268	valid-auc:0.757741
[229]	train-auc:0.973532	valid-auc:0.757789
[230]	train-auc:0.973886	valid-auc:0.757685
[231]	train-auc:0.974172	valid-auc:0.757673
[232]	train-auc:0.974309	valid-auc:0.757565
[233]	train-auc:0.97448	valid-auc:0.757545
[234]	train-auc:0.974685	valid-auc:0.757505
[235]	train-auc:0.974891	valid-auc:0.757379
[236]	train-auc:0.975031	valid-auc:0.757305
[237]	train-auc:0.975171	valid-auc:0.75724
[238]	train-auc:0.975394	valid-auc:0.757141
[239]	train-auc:0.975648	valid-auc:0.757158
[240]	train-auc:0.975901	valid-auc:0.756953
[241]	train-auc:0.976391	valid-auc:0.757017
Stopping. Best iteration:
[191]	train-auc:0.963478	valid-auc:0.757979

[mlcrate] Finished training fold 6 - took 54s - running score 0.753265
[mlcrate] Finished training 7 XGBoost models, took 7m24s

In [33]:
p_train


Out[33]:
array([ 0.21688,  0.40872,  0.87946, ...,  0.80383,  0.69749,  0.93272], dtype=float32)