In [ ]:
import pyspark

conf = pyspark.SparkConf()
# Point the driver at the Mesos master running on the controller node
conf.setMaster("mesos://controller:5050")
# Docker image that Mesos agents pull to run the Spark executors
conf.set("spark.mesos.executor.docker.image", "registry:5000/bdkd:spark_mesos_v11")
# Location of the Spark installation inside the executor image
conf.set("spark.mesos.executor.home", "/opt/spark-1.4.0-bin-hadoop2.4")
# Path to the Mesos native library inside the executor image
conf.set("spark.executorEnv.MESOS_NATIVE_JAVA_LIBRARY", "/usr/local/lib/libmesos.so")
sc = pyspark.SparkContext(conf=conf)

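In [ ]:
# Optional sanity check (a minimal sketch): confirm the driver is attached to
# the Mesos master and see the Spark version and default parallelism in use.
print(sc.master)
print(sc.version)
print(sc.defaultParallelism)
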
In [ ]:
# The first run takes a while because the Spark Docker image (~1 GB) has to be pulled onto the Mesos agents
sc.parallelize(range(1000)).count()

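In [ ]:
# A slightly larger job to exercise the Mesos executors, sketched here as an
# optional follow-up: the standard Monte Carlo estimate of pi.
import random

def inside(_):
    x, y = random.random(), random.random()
    return x * x + y * y < 1.0

n = 100000
hits = sc.parallelize(range(n)).filter(inside).count()
print("Pi is roughly %f" % (4.0 * hits / n))
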
In [ ]:
sc.stop()
