In [ ]:
%%classpath add mvn
org.apache.spark spark-sql_2.12 2.4.4

In [ ]:
%%spark --noUI
// Configure a local SparkSession: 4 worker threads, no Spark UI widget.
// NOTE(review): the BeakerX %%spark magic presumably calls getOrCreate()
// on this builder and binds the session into the notebook as `spark`
// (used by later cells) — confirm against BeakerX docs.
SparkSession.builder()
      .appName("Simple Application")
      .master("local[4]")

In [ ]:
// Monte Carlo estimate of Pi: sample NUM_SAMPLES random points in the unit
// square [0,1) x [0,1) and count how many land inside the quarter circle
// of radius 1; that fraction approaches Pi/4 as the sample count grows.
import scala.math.random

val NUM_SAMPLES = 10000000

val count2 = spark.sparkContext
  .parallelize(1 to NUM_SAMPLES)
  .map { _ =>
    // Draw one uniform random point; the sample index itself is unused.
    val x = random
    val y = random
    // Contribute 1 if the point is inside the quarter circle, else 0.
    if (x * x + y * y < 1) 1 else 0
  }
  .reduce(_ + _)

// Quarter-circle area / square area = Pi/4, so scale the hit ratio by 4.
"Pi is roughly " + 4.0 * count2 / NUM_SAMPLES

In [ ]:
spark.sparkContext.stop()