Client Integration (XGBoost)

This example walks through an XGBoost workflow on the MNIST digits dataset and incorporates Verta's client integration to log the training run.


In [1]:
HOST = "app.verta.ai"

PROJECT_NAME = "MNIST Multiclassification"
EXPERIMENT_NAME = "Boosted Trees"

In [2]:
# Use this cell to set your Verta credentials if they are not already in your environment.
# import os
# os.environ['VERTA_EMAIL'] = 
# os.environ['VERTA_DEV_KEY'] =

In [3]:
from verta import Client

client = Client(HOST)
proj = client.set_project(PROJECT_NAME)
expt = client.set_experiment(EXPERIMENT_NAME)

Imports


In [4]:
from __future__ import print_function

from sklearn import datasets
from sklearn.model_selection import train_test_split
import xgboost as xgb

Log Workflow

Prepare Data


In [5]:
digits = datasets.load_digits()
n_samples = len(digits.images)
data = digits.images.reshape((n_samples, -1))

X_train, X_test, y_train, y_test = train_test_split(
    data, digits.target, test_size=0.5, shuffle=False)

dtrain = xgb.DMatrix(X_train, label=y_train)
dtest = xgb.DMatrix(X_test, label=y_test)

Define Model


In [6]:
params = {
    'eta': 0.08,                   # learning rate (shrinkage per boosting round)
    'max_depth': 6,                # maximum depth of each tree
    'subsample': 0.8,              # fraction of rows sampled per tree
    'colsample_bytree': 0.8,       # fraction of features sampled per tree
    'objective': "multi:softmax",  # multiclass classification, predicting class labels
    'eval_metric': "merror",       # multiclass classification error rate
    'alpha': 8,                    # L1 regularization weight
    'lambda': 2,                   # L2 regularization weight
    'num_class': 10,               # ten digit classes
}
num_rounds = 20      # boosting rounds
early_stopping = 50  # stop if the eval metric hasn't improved in this many rounds

Run and Log Training


In [7]:
run = client.set_experiment_run()
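
The integration callback used below focuses on per-round evaluation results; if you also want the hyperparameters attached to the run, they can be logged explicitly. This is an optional, minimal sketch using the client's log_hyperparameters and log_hyperparameter methods:

In [ ]:
# sketch: record the training configuration on the run for later comparison
run.log_hyperparameters(params)
run.log_hyperparameter("num_rounds", num_rounds)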

In [8]:
from verta.integrations.xgboost import verta_callback


bst = xgb.train(
    params, dtrain,
    num_rounds,
    evals=[(dtrain, "train"), (dtest, "eval")],
    early_stopping_rounds=early_stopping,
    verbose_eval=False,
    callbacks=[verta_callback(run)],
)
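
With verta_callback attached, each boosting round's train and eval results are logged to the run as observations. A final held-out score can also be computed and logged explicitly; in the sketch below, the metric name "test_error" is just a label of our choosing:

In [ ]:
import numpy as np

# with objective "multi:softmax", predict() returns class labels directly
y_pred = bst.predict(dtest)
test_error = float((y_pred != y_test).mean())
print("test error:", test_error)

# log the final held-out score to the same run
run.log_metric("test_error", test_error)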

In [9]:
run
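
Displaying run prints its details, including where to view it in the Verta web app. The observations logged by the callback can also be read back through the client; this sketch assumes the callback keys observations by dataset and metric name (e.g. "eval-merror"):

In [ ]:
# sketch: read back the per-round eval error recorded by the callback
run.get_observation("eval-merror")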