For a more detailed guide, refer to the TensorFlow or PyTorch examples, or to the documentation at https://clipper.ai.
In [2]:
import logging, xgboost as xgb, numpy as np
from clipper_admin.deployers import python as python_deployer
from clipper_admin import ClipperConnection, DockerContainerManager
In [3]:
# Start a Clipper cluster using the Docker container manager and connect to it.
clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.start_clipper()
clipper_conn.connect()
In [4]:
def get_test_point():
    # Generate a random 784-dimensional integer vector (e.g. a flattened 28x28 image).
    return [np.random.randint(255) for _ in range(784)]
In [5]:
# Create a training matrix with a single random point (one row, one label).
dtrain = xgb.DMatrix(np.array([get_test_point()]), label=[0])
# Create parameters, a watchlist, and specify the number of boosting rounds.
param = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic'}
watchlist = [(dtrain, 'train')]
num_round = 2
bst = xgb.train(param, dtrain, num_round, watchlist)
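Before deploying, it can help to sanity-check the booster locally. The sketch below simply scores the same training matrix; whatever value it prints is the baseline the deployed model should reproduce for the same input.
# Score the training matrix locally as a baseline for the deployed model.
local_preds = bst.predict(dtrain)
print(local_preds)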
In [6]:
def predict(xs):
    # Clipper calls this closure with a batch of inputs and expects one prediction per input.
    return bst.predict(xgb.DMatrix(xs))
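Since Clipper sends inputs to the closure in batches, a quick local check should also use a batch. A minimal sketch with a one-element batch (converted to a 2-D array so xgb.DMatrix accepts it across versions):
# Call the closure the way Clipper will: with a batch of input points.
print(predict(np.array([get_test_point()])))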
In [8]:
# Register an XGBoost application: name, input type, default output (returned
# when a query misses its latency objective), and latency SLO in microseconds (100 ms).
clipper_conn.register_application('xgboost-app', 'integers', 'default_pred', 100000)
In [10]:
# Deploy the closure as an XGBoost model. pkgs_to_install makes Clipper
# pip-install xgboost inside the model container it builds.
python_deployer.deploy_python_closure(clipper_conn,
                                      name='xgboost-model',
                                      version=1,
                                      input_type='integers',
                                      func=predict,
                                      pkgs_to_install=['xgboost'])
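Once the deployment finishes, the registered models can be listed to confirm the container is up. This is a sketch using clipper_admin's get_all_models helper (assumed to be available on the connection object):
# List the models Clipper knows about; 'xgboost-model' should appear.
print(clipper_conn.get_all_models())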
In [12]:
# Link the model to the app so queries to 'xgboost-app' are routed to 'xgboost-model'.
clipper_conn.link_model_to_app('xgboost-app', 'xgboost-model')
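The link can be verified as well; a sketch assuming clipper_admin's get_linked_models helper:
# Confirm that 'xgboost-model' is now linked to 'xgboost-app'.
print(clipper_conn.get_linked_models('xgboost-app'))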
In [14]:
# Get query address
query_address = clipper_conn.get_query_addr()
In [15]:
# Run a query against the deployed application.
import requests, json, numpy as np

headers = {"Content-type": "application/json"}
requests.post(
    "http://" + query_address + "/xgboost-app/predict",
    headers=headers,
    data=json.dumps({"input": get_test_point()})).json()
# The result should match what the model predicts locally.
Out[15]:
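The endpoint replies with a small JSON object. A sketch of pulling out individual fields, assuming Clipper's usual response keys ("query_id", "output", "default"):
# Capture the JSON reply and read its fields (assumes the standard Clipper
# response keys "query_id", "output", and "default"; values vary per run).
response = requests.post(
    "http://" + query_address + "/xgboost-app/predict",
    headers=headers,
    data=json.dumps({"input": get_test_point()})).json()
print(response["query_id"], response["output"], response["default"])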
In [17]:
# Stop Clipper and all of its containers.
clipper_conn.stop_all()