In [1]:
import findspark; findspark.init()
In [2]:
import sparkhpc
In [3]:
# Request a 4-core standalone Spark cluster as an LSF batch job.
sj = sparkhpc.sparkjob.LSFSparkJob(ncores=4)
# Block until the scheduler has started the job.
# NOTE(review): unlike sj2 below, there is no explicit sj.submit() before this
# call — presumably wait_to_start() submits the job itself; confirm against
# the sparkhpc API, otherwise this would wait on a never-submitted job.
sj.wait_to_start()
In [4]:
sj
Out[4]:
In [5]:
# Queue a second, larger (10-core) cluster job; submit without waiting for it
# to start — it is only torn down at the end, never attached to.
sj2 = sparkhpc.sparkjob.LSFSparkJob(ncores=10)
sj2.submit()
In [6]:
sj.show_clusters()
In [7]:
from pyspark import SparkContext
In [8]:
sc = SparkContext(master=sj.master_url)
In [9]:
sc.parallelize(range(100)).count()
Out[9]:
In [10]:
# Shut down the SparkContext first so it detaches cleanly from sj's master
# before the cluster itself is torn down (the original left sc running
# against a stopped cluster).
sc.stop()
# Tear down both LSF-scheduled Spark clusters.
sj.stop()
sj2.stop()
In [11]:
sj.show_clusters()
In [ ]: