In [ ]:
!pip3 install kfp --upgrade
In [ ]:
import kfp.dsl as dsl
import kfp
from kfp import components
import json
# Create kfp client
# Note: pass the Kubeflow Pipelines endpoint to kfp.Client() if this notebook is not running on the same cluster.
client = kfp.Client()  # e.g. kfp.Client(host='http://<your-kfp-endpoint>') when running outside the cluster
EXPERIMENT_NAME = 'KFServing Experiments'
experiment = client.create_experiment(name=EXPERIMENT_NAME)
In [ ]:
kfserving_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/kfserving/component.yaml')
@dsl.pipeline(
    name='kfserving pipeline',
    description='A pipeline for kfserving.'
)
def kfservingPipeline(
    action='update',
    model_name='tf-sample',
    default_model_uri='gs://kfserving-samples/models/tensorflow/flowers',
    canary_model_uri='gs://kfserving-samples/models/tensorflow/flowers-2',
    canary_model_traffic_percentage='10',
    namespace='your_namespace',
    framework='tensorflow',
    default_custom_model_spec='{}',
    canary_custom_model_spec='{}',
    autoscaling_target='0',
    kfserving_endpoint=''
):
    # define workflow
    kfserving = kfserving_op(
        action=action,
        model_name=model_name,
        default_model_uri=default_model_uri,
        canary_model_uri=canary_model_uri,
        canary_model_traffic_percentage=canary_model_traffic_percentage,
        namespace=namespace,
        framework=framework,
        default_custom_model_spec=default_custom_model_spec,
        canary_custom_model_spec=canary_custom_model_spec,
        autoscaling_target=autoscaling_target,
        kfserving_endpoint=kfserving_endpoint
    ).set_image_pull_policy('Always')

# Compile pipeline
import kfp.compiler as compiler
compiler.Compiler().compile(kfservingPipeline, 'tf-flower.tar.gz')
# Execute pipeline
run = client.run_pipeline(experiment.id, 'tf-flower', 'tf-flower.tar.gz')
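Once the run is submitted, you can optionally wait for it to finish and send a test prediction to the deployed TensorFlow model. The ingress address, service hostname, and image payload below are placeholders for your cluster; the request shape assumes the standard KFServing v1 predict protocol used by the flowers sample.
In [ ]:
# Optional: block until the pipeline run completes (timeout in seconds).
client.wait_for_run_completion(run.id, timeout=1200)

# Send a sample prediction to the deployed InferenceService.
# NOTE: ingress_host, service_hostname, and the image payload are placeholders;
# replace them with values from your own cluster before running.
import requests

ingress_host = 'http://<INGRESS_IP>:<INGRESS_PORT>'         # cluster ingress gateway (placeholder)
service_hostname = 'tf-sample.your_namespace.example.com'   # InferenceService host header (placeholder)

# image_b64 should be a base64-encoded JPEG, e.g.:
#   base64.b64encode(open('flower.jpg', 'rb').read()).decode('utf-8')
image_b64 = '<BASE64_ENCODED_IMAGE>'

payload = {'instances': [{'image_bytes': {'b64': image_b64}, 'key': '1'}]}
resp = requests.post(
    f'{ingress_host}/v1/models/tf-sample:predict',
    json=payload,
    headers={'Host': service_hostname},
)
print(resp.json())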
In [ ]:
kfserving_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/kfserving/component.yaml')
@dsl.pipeline(
    name='kfserving pipeline',
    description='A pipeline for kfserving.'
)
def kfservingPipeline(
    action='update',
    model_name='custom-sample',
    default_model_uri='',
    canary_model_uri='',
    canary_model_traffic_percentage='0',
    namespace='kubeflow',
    framework='custom',
    default_custom_model_spec='{"name": "image-segmenter", "image": "codait/max-image-segmenter:latest", "port": "5000"}',
    canary_custom_model_spec='{}',
    autoscaling_target='0',
    kfserving_endpoint=''
):
    # define workflow
    kfserving = kfserving_op(
        action=action,
        model_name=model_name,
        default_model_uri=default_model_uri,
        canary_model_uri=canary_model_uri,
        canary_model_traffic_percentage=canary_model_traffic_percentage,
        namespace=namespace,
        framework=framework,
        default_custom_model_spec=default_custom_model_spec,
        canary_custom_model_spec=canary_custom_model_spec,
        autoscaling_target=autoscaling_target,
        kfserving_endpoint=kfserving_endpoint
    ).set_image_pull_policy('Always')

# Compile pipeline
import kfp.compiler as compiler
compiler.Compiler().compile(kfservingPipeline, 'custom.tar.gz')
# Execute pipeline
run = client.run_pipeline(experiment.id, 'custom-model', 'custom.tar.gz')
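Similarly, after the custom-model run finishes you can probe the deployed container through the cluster ingress. The addresses below are placeholders, and the /model/metadata route is an assumption based on the API typically exposed by the codait/max-image-segmenter image; adjust both for your deployment.
In [ ]:
# Optional: wait for the custom-model run, then probe the deployed container.
client.wait_for_run_completion(run.id, timeout=1200)

# NOTE: ingress_host and service_hostname are placeholders, and /model/metadata
# assumes the MAX model API served by codait/max-image-segmenter.
import requests

ingress_host = 'http://<INGRESS_IP>:<INGRESS_PORT>'        # cluster ingress gateway (placeholder)
service_hostname = 'custom-sample.kubeflow.example.com'    # InferenceService host header (placeholder)

resp = requests.get(
    f'{ingress_host}/model/metadata',
    headers={'Host': service_hostname},
)
print(resp.status_code, resp.json())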