Example #1
0
 def test_access_by_instance(self):
     """The context-managed Spark object and Spark.Instance() refer to the same session."""
     with peachbox.Spark() as managed:
         singleton = peachbox.Spark.Instance()
         self.assertEqual(managed, singleton)
Example #2
0
 def test_streaming_context_initialization(self):
     """A streaming context is created on demand and cleared again by stop()."""
     with peachbox.Spark() as spark:
         # Use unittest assertions instead of bare `assert`: bare asserts are
         # stripped under `python -O` and give no failure message, and the
         # rest of this suite already uses the self.assert* style.
         self.assertTrue(spark.streaming_context(dstream_time_interval=2))
         spark.stop()
         self.assertFalse(spark._streaming_context)
Example #3
0
 def test_init(self):
     """Entering the context manager yields a peachbox.Spark instance."""
     with peachbox.Spark() as session:
         self.assertIsInstance(session, peachbox.Spark)
Example #4
0
 def test_stop_sql_context(self):
     """An SQL context can be obtained, and stop() clears the cached instance."""
     with peachbox.Spark() as spark:
         # Use unittest assertions instead of bare `assert`: bare asserts are
         # stripped under `python -O` and give no failure message, and the
         # rest of this suite already uses the self.assert* style.
         self.assertTrue(spark.sql_context())
         spark.stop()
         self.assertFalse(spark._sql_context)
Example #5
0
 def test_sql_context(self):
     """sql_context() returns a pyspark SQLContext object."""
     with peachbox.Spark() as session:
         ctx = session.sql_context()
         self.assertIsInstance(ctx, pyspark.sql.SQLContext)
Example #6
0
 def test_rdd_creation(self):
     """An RDD parallelized through the managed context round-trips its elements."""
     with peachbox.Spark() as session:
         expected = [1, 2]
         rdd = session.context().parallelize(expected)
         self.assertEqual(expected, rdd.collect())
Example #7
0
 def test_stop_and_relaunch(self):
     """After stop(), a new conf assigned to spark_conf takes effect on relaunch."""
     with peachbox.Spark() as session:
         session.context()
         session.stop()
         # Reconfigure, then request the context again to trigger a relaunch.
         session.spark_conf = {'spark.app.name': 'new_launch'}
         relaunched = session.context()
         self.assertEqual('new_launch', relaunched.appName)
Example #8
0
 def test_spark_conf(self):
     """A conf dict passed to Spark() is reflected in the live SparkConf."""
     settings = {'spark.app.name': 'test_name', 'spark.master': 'local[*]'}
     with peachbox.Spark(settings) as session:
         live = dict(session.get_spark_conf().getAll())
         self.assertEqual('test_name', live.get('spark.app.name'))
         self.assertEqual('local[*]', live.get('spark.master'))