In [ ]:
%%classpath add mvn
org.apache.spark spark-mesos_2.11 2.4.0
In [ ]:
%%spark
// Builder for a Spark session that targets a local Mesos master.
// NOTE(review): no .getOrCreate() here — presumably the %%spark magic
// consumes this builder expression and finalizes the session itself; confirm
// against the BeakerX %%spark documentation before changing this cell.
SparkSession.builder()
.appName("Simple Application")
// Executors download this Spark distribution archive at launch; the
// placeholder PATH_TO must be replaced with a URI reachable by every
// Mesos agent (e.g. http://, hdfs://, or a shared filesystem path).
.config("spark.executor.uri", "PATH_TO/spark-2.4.0-bin-hadoop2.7.tgz")
// or
// Alternative: point executors at a pre-installed Spark home on each agent
// instead of shipping a tarball.
// .config("spark.mesos.executor.home", "SPARK_HOME")
// Mesos master endpoint (host:port); here a master running locally.
.master("mesos://127.0.0.1:5050")
In [ ]:
import scala.math.random

// Monte-Carlo estimate of Pi: sample points uniformly in the unit square
// and count the fraction landing inside the quarter unit circle; that
// fraction approximates Pi/4.
val NUM_SAMPLES = 10000000

// Distribute the sample indices across the cluster; each sample draws a
// random point and contributes 1 if it falls inside the circle, else 0.
val count2 = spark.sparkContext
  .parallelize(1 to NUM_SAMPLES)
  .map { _ =>
    val px = random
    val py = random
    if (px * px + py * py < 1) 1 else 0
  }
  .fold(0)(_ + _) // total number of hits (fold with 0 ≡ reduce(_ + _) for Int sums)

// hits / samples ≈ Pi / 4, so scale by 4 to recover the estimate.
println(s"Pi is roughly ${4.0 * count2 / NUM_SAMPLES}")