Package your Python function, Python file, or Jupyter notebook as a Docker image with Kubeflow Fairing

Environment Setup


In [ ]:
import sys

from kubeflow import fairing
from kubeflow.fairing import TrainJob
from kubeflow.fairing.backends import KubeflowAWSBackend

In [ ]:
# Match the base image's Python version to the local interpreter.
PY_VERSION = ".".join([str(x) for x in sys.version_info[0:3]])
BASE_IMAGE = 'registry.hub.docker.com/library/python:{}'.format(PY_VERSION)

# Set up AWS Elastic Container Registry (ECR) for storing output containers.
# Set your own AWS_REGION.
AWS_ACCOUNT_ID = fairing.cloud.aws.guess_account_id()
AWS_REGION = 'us-west-2'
DOCKER_REGISTRY = '{}.dkr.ecr.{}.amazonaws.com'.format(AWS_ACCOUNT_ID, AWS_REGION)
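
Fairing pushes the images it builds to the registry configured above. If the target ECR repository does not already exist (and your Fairing version does not create it for you), you can create it up front. A minimal sketch with the AWS CLI follows; the repository name fairing-job is an assumption here, so adjust it to whatever repository your jobs actually push to.

In [ ]:
# Assumed setup step: pre-create the ECR repository that images are pushed to.
# 'fairing-job' is an assumed repository name; adjust if yours differs.
!aws ecr create-repository --repository-name fairing-job --region {AWS_REGION}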

Convert Python function


In [ ]:
def train():
    print("simple train job!")

# Package the function as a Docker image, push it to ECR,
# and run it as a training job on the cluster.
job = TrainJob(train,
               base_docker_image=BASE_IMAGE,
               docker_registry=DOCKER_REGISTRY,
               backend=KubeflowAWSBackend())
job.submit()
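
Calling submit() builds the image from BASE_IMAGE, pushes it to the ECR registry configured above, and runs the function as a job on the Kubeflow cluster; the job's logs should stream back into the notebook output.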

Convert Python file


In [ ]:
%%writefile train.py
print("hello world!")

In [ ]:
job = TrainJob("train.py", base_docker_image=BASE_IMAGE, docker_registry=DOCKER_REGISTRY, backend=KubeflowAWSBackend())
job.submit()
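
If the script needs extra PyPI packages, they can be shipped the same way the notebook example below ships its dependencies, via input_files. A minimal sketch, assuming a requirements.txt in the same folder that lists the script's dependencies:

In [ ]:
# A sketch: include requirements.txt in the build context so its packages
# are installed into the image. The file is assumed to exist alongside train.py.
job = TrainJob("train.py",
               input_files=["requirements.txt"],
               base_docker_image=BASE_IMAGE,
               docker_registry=DOCKER_REGISTRY,
               backend=KubeflowAWSBackend())
job.submit()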

Convert Jupyter Notebook


In [ ]:
%%writefile requirements.txt
papermill
jupyter

In [ ]:
# We already have a train.ipynb in the same folder.
# requirements.txt is shipped via input_files so that papermill and jupyter,
# used to execute the notebook inside the container, are installed into the image.
job = TrainJob("train.ipynb",
               input_files=["requirements.txt"],
               base_docker_image=BASE_IMAGE,
               docker_registry=DOCKER_REGISTRY,
               backend=KubeflowAWSBackend())
job.submit()
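
Once a job has run, you can check that the image was pushed to ECR. A hedged verification step with the AWS CLI, again assuming the fairing-job repository name:

In [ ]:
# List pushed images (the repository name is an assumption; adjust as needed).
!aws ecr list-images --repository-name fairing-job --region {AWS_REGION}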