running the kernel jupyter-scala installed at https://github.com/jupyter-scala/jupyter-scala


In [7]:
import $exclude.`org.slf4j:slf4j-log4j12`, $ivy.`org.slf4j:slf4j-nop:1.7.21` // for cleaner logs
import $profile.`hadoop-2.6`
import $ivy.`org.apache.spark::spark-sql:2.1.0` // adjust spark version - spark >= 2.0
import $ivy.`org.apache.hadoop:hadoop-aws:2.6.4`
import $ivy.`org.jupyter-scala::spark:0.4.2` // for JupyterSparkSession (SparkSession aware of the jupyter-scala kernel)

import org.apache.spark._
import org.apache.spark.sql._
import jupyter.spark.session._

val sparkSession = JupyterSparkSession.builder() // important - call this rather than SparkSession.builder()
  .jupyter() // this method must be called straightaway after builder()
  // .yarn("/etc/hadoop/conf") // optional, for Spark on YARN - argument is the Hadoop conf directory
  // .emr("2.6.4") // on AWS ElasticMapReduce, this adds aws-related to the spark jar list


Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.pom.sha1
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.pom
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.pom.sha1
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.pom
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-parent/1.7.21/slf4j-parent-1.7.21.pom
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-parent/1.7.21/slf4j-parent-1.7.21.pom.sha1
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-parent/1.7.21/slf4j-parent-1.7.21.pom
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-parent/1.7.21/slf4j-parent-1.7.21.pom.sha1
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-parent/1.7.21/
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-parent/1.7.21/
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.pom
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.pom.sha1
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.pom
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.pom.sha1
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.jar.sha1
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.jar
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar
Downloading https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar.sha1
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.jar.sha1
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-nop/1.7.21/slf4j-nop-1.7.21.jar
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar.sha1
Downloaded https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar
Downloading https://repo1.maven.org/maven2/org/apache/spark/spark-sql_2.12/2.1.0/spark-sql_2.12-2.1.0.pom
Downloading https://repo1.maven.org/maven2/org/apache/spark/spark-sql_2.12/2.1.0/spark-sql_2.12-2.1.0.pom.sha1
Downloading https://oss.sonatype.org/content/repositories/releases/org/apache/spark/spark-sql_2.12/2.1.0/spark-sql_2.12-2.1.0.pom
Downloading https://oss.sonatype.org/content/repositories/releases/org/apache/spark/spark-sql_2.12/2.1.0/spark-sql_2.12-2.1.0.pom.sha1
ammonite.runtime.tools.DependencyThing$IvyResolutionException: failed to resolve dependencies:
  Dependency(org.apache.spark:spark-sql_2.12,2.1.0,compile,Set((org.slf4j,slf4j-log4j12)),Attributes(,),false,true): not found: /home/ale/.ivy2/local/org.apache.spark/spark-sql_2.12/2.1.0/ivys/ivy.xml, not found: https://repo1.maven.org/maven2/org/apache/spark/spark-sql_2.12/2.1.0/spark-sql_2.12-2.1.0.pom, not found: https://oss.sonatype.org/content/repositories/releases/org/apache/spark/spark-sql_2.12/2.1.0/spark-sql_2.12-2.1.0.pom
  ammonite.runtime.tools.DependencyThing.resolveArtifact(DependencyThing.scala:93)
  ammonite.interp.Interpreter.loadIvy(Interpreter.scala:665)
  ammonite.runtime.ImportHook$BaseIvy.liftedTree1$1(ImportHook.scala:182)
  ammonite.runtime.ImportHook$BaseIvy.$anonfun$resolve$2(ImportHook.scala:182)
  ammonite.util.Res$Success.flatMap(Res.scala:58)
  ammonite.runtime.ImportHook$BaseIvy.resolve(ImportHook.scala:179)
  ammonite.runtime.ImportHook$BaseIvy.$anonfun$handle$7(ImportHook.scala:191)
  ammonite.util.Res$.$anonfun$map$1(Res.scala:34)
  scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:122)
  scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:118)
  scala.collection.immutable.List.foldLeft(List.scala:86)
  ammonite.util.Res$.map(Res.scala:31)
  ammonite.runtime.ImportHook$BaseIvy.$anonfun$handle$6(ImportHook.scala:191)
  ammonite.util.Res$Success.flatMap(Res.scala:58)
  ammonite.runtime.ImportHook$BaseIvy.handle(ImportHook.scala:190)
  ammonite.interp.Interpreter.$anonfun$resolveSingleImportHook$5(Interpreter.scala:181)
  ammonite.util.Res$Success.flatMap(Res.scala:58)
  ammonite.interp.Interpreter.resolveSingleImportHook(Interpreter.scala:180)
  ammonite.interp.Interpreter.$anonfun$resolveImportHooks$3(Interpreter.scala:229)
  ammonite.util.Res$.$anonfun$map$1(Res.scala:34)
  scala.collection.IndexedSeqOptimized.foldLeft(IndexedSeqOptimized.scala:56)
  scala.collection.IndexedSeqOptimized.foldLeft$(IndexedSeqOptimized.scala:64)
  scala.collection.mutable.ArrayBuffer.foldLeft(ArrayBuffer.scala:48)
  ammonite.util.Res$.map(Res.scala:31)
  ammonite.interp.Interpreter.resolveImportHooks(Interpreter.scala:229)
  ammonite.interp.Interpreter.$anonfun$processLine$3(Interpreter.scala:245)
  ammonite.util.Catching.flatMap(Res.scala:109)
  ammonite.interp.Interpreter.processLine(Interpreter.scala:239)
  jupyter.scala.Interp.$anonfun$interpret$8(Interp.scala:147)
  jupyter.scala.Capture$.$anonfun$withErr$1(Capture.scala:46)
  scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
  scala.Console$.withErr(Console.scala:192)
  jupyter.scala.Capture$.withErr(Capture.scala:42)
  jupyter.scala.Capture$.$anonfun$withOutAndErr$3(Capture.scala:59)
  jupyter.scala.Capture$.$anonfun$withOut$1(Capture.scala:37)
  scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
  scala.Console$.withOut(Console.scala:163)
  jupyter.scala.Capture$.withOut(Capture.scala:33)
  jupyter.scala.Capture$.withOutAndErr(Capture.scala:59)
  jupyter.scala.Capture$.apply(Capture.scala:106)
  jupyter.scala.Interp.capturingOutput(Interp.scala:104)
  jupyter.scala.Interp.$anonfun$interpret$7(Interp.scala:146)
  jupyter.scala.Scoped.$anonfun$flatMap$1(Signaller.scala:45)
  jupyter.scala.Signaller.apply(Signaller.scala:30)
  jupyter.scala.Scoped.flatMap(Signaller.scala:45)
  jupyter.scala.Scoped.flatMap$(Signaller.scala:45)
  jupyter.scala.Signaller.flatMap(Signaller.scala:12)
  jupyter.scala.Interp.$anonfun$interpret$5(Interp.scala:142)
  ammonite.util.Res$Success.flatMap(Res.scala:58)
  jupyter.scala.Interp.interpret(Interp.scala:136)
  jupyter.kernel.interpreter.InterpreterHandler$.$anonfun$execute$3(InterpreterHandler.scala:122)
  jupyter.kernel.interpreter.InterpreterHandler$.$anonfun$publishing$2(InterpreterHandler.scala:59)
  scalaz.concurrent.Task$.Try(Task.scala:457)
  scalaz.concurrent.Task$.$anonfun$unsafeStart$1(Task.scala:363)
  scalaz.concurrent.Future$$anon$3.call(Future.scala:432)
  scalaz.concurrent.Future$$anon$3.call(Future.scala:432)
  java.util.concurrent.FutureTask.run(FutureTask.java:266)
  java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  java.lang.Thread.run(Thread.java:748)

Small example found around in the web


In [5]:
def oncePerSecond(callback: () => Boolean) {
while(callback()) {
  Thread sleep 1000
}
}

/** Counts down from 0 to 1, approxmimately once per second.
*/
def main(args: Array[String]) {
var count = 10
oncePerSecond(() => {println(count); count -= 1; count > 0})
}


Out[5]:
defined function oncePerSecond
defined function main

In [6]:
var z = Array("Zara", "Nuha", "Ayan")


Out[6]:
z: Array[String] = Array("Zara", "Nuha", "Ayan")

In [7]:
main(z)


10
9
8
7
6
5
4
3
2
1

In [ ]: