In [1]:
print("hello")


hello

In [4]:
import $exclude.`org.slf4j:slf4j-log4j12`, $ivy.`org.slf4j:slf4j-nop:1.7.21` // for cleaner logs
import $profile.`hadoop-2.6`
import $ivy.`org.apache.spark::spark-sql:2.1.0` // adjust the Spark version as needed (requires Spark >= 2.0)
import $ivy.`org.apache.hadoop:hadoop-aws:2.6.4`
import $ivy.`org.jupyter-scala::spark:0.4.2` // for JupyterSparkSession (a SparkSession aware of the jupyter-scala kernel)

import org.apache.spark._
import org.apache.spark.sql._
import jupyter.spark.session._

val sparkSession = JupyterSparkSession.builder() // important - call this rather than SparkSession.builder()
  .jupyter() // this method must be called immediately after builder()
  // .yarn("/etc/hadoop/conf") // optional, for Spark on YARN - argument is the Hadoop conf directory
  // .emr("2.6.4") // on AWS ElasticMapReduce, this adds aws-related to the spark jar list
  .master("local") // change to "yarn-client" on YARN
  // .config("spark.executor.instances", "10")
  .config("spark.executor.memory", "1g")
  // .config("spark.hadoop.fs.s3a.access.key", awsCredentials._1)
  // .config("spark.hadoop.fs.s3a.secret.key", awsCredentials._2)
  .appName("notebook")
  .getOrCreate()


java.lang.IllegalArgumentException: System memory 466092032 must be at least 471859200. Please increase heap size using the --driver-memory option or spark.driver.memory in Spark configuration.
  org.apache.spark.memory.UnifiedMemoryManager$.getMaxMemory(UnifiedMemoryManager.scala:216)
  org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:198)
  org.apache.spark.SparkEnv$.create(SparkEnv.scala:330)
  org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:174)
  org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:257)
  org.apache.spark.SparkContext.<init>(SparkContext.scala:432)
  org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2313)
  org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:868)
  org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:860)
  scala.Option.getOrElse(Option.scala:121)
  org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:860)
  jupyter.spark.session.JupyterSparkSession$Builder.getOrCreate(JupyterSparkSession.scala:48)
  $sess.cmd3Wrapper$Helper.<init>(cmd3.sc:21)
  $sess.cmd3Wrapper.<init>(cmd3.sc:123)
  $sess.cmd3$.<init>(cmd3.sc:63)
  $sess.cmd3$.<clinit>(cmd3.sc:-1)
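
getOrCreate() fails because, with .master("local"), the driver runs inside the kernel's own JVM, and that JVM was started with too small a heap (~466 MB reported, below Spark's ~450 MiB minimum of usable memory). Setting spark.driver.memory from the builder cannot enlarge a heap that is already fixed; the heap has to be raised when the kernel itself is launched. A minimal sketch of the retry, assuming the kernel has been restarted with a larger heap (e.g. -Xmx1g passed through the kernel's Java options):

// check the heap the kernel JVM actually got - should be comfortably above 450 MB
val maxHeapMb = Runtime.getRuntime.maxMemory / (1024L * 1024L)
println(s"kernel max heap: ${maxHeapMb} MB")

val sparkSession = JupyterSparkSession.builder()
  .jupyter()
  .master("local")
  // .config("spark.driver.memory", "2g") // only takes effect when Spark launches the driver JVM itself (e.g. via spark-submit), not here
  .config("spark.executor.memory", "1g")
  .appName("notebook")
  .getOrCreate()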

In [2]:
sparkSession


cmd2.sc:1: not found: value sparkSession
val res2 = sparkSession
           ^
Compilation Failed

In [4]:
sc


cmd4.sc:1: not found: value sc
val res4 = sc
           ^
Compilation Failed
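
Both lookups fail only because the builder cell above threw before binding anything: sparkSession was never created, so neither it nor any sc shorthand exists in the session. Once getOrCreate() succeeds, the val resolves and the SparkContext can be pulled from it. A minimal sketch, assuming the builder cell now runs cleanly:

sparkSession                              // now resolves to the live session
val sc = sparkSession.sparkContext        // bind an `sc` shorthand yourself if you want one
sc.parallelize(1 to 100).sum()            // quick smoke test against the local master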

In [ ]: