Example 1
    @classmethod
    def setUpClass(cls):
        """Set up a shared Spark context with Hive support for testing."""
        class_name = cls.__name__
        cls.sc = SparkContext(cls.getMaster(), appName=class_name)
        quiet_logs(cls.sc)
        # Wrap the JVM-side TestHiveContext in a Python HiveContext so
        # tests can run Hive-flavoured SQL against the shared context.
        _scala_HiveContext = \
            cls.sc._jvm.org.apache.spark.sql.hive.test.TestHiveContext(
                cls.sc._jsc.sc()
            )
        cls.sql_context = HiveContext(cls.sc, _scala_HiveContext)

        quiet_py4j()
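This excerpt stops after setup; a matching tearDownClass keeps suites from leaking the shared context. A minimal sketch, assuming the same base class as above (the clearProperty step mirrors the tearDown shown in Example 2):

    @classmethod
    def tearDownClass(cls):
        # Stop the shared context and clear the driver port so the next
        # suite can rebind (see the tearDown in Example 2 below).
        cls.sc.stop()
        cls.sc._jvm.System.clearProperty("spark.driver.port")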
Example 2
import unittest

from utils import add_pyspark_path, quiet_py4j, add_jars

add_pyspark_path()
quiet_py4j()
add_jars()

from pyspark.context import SparkContext


class PySparkTestCase(unittest.TestCase):
    def setUp(self):
        class_name = self.__class__.__name__
        self.sc = SparkContext('local', class_name)
        # Silence the console progress bar and raise the log level so
        # test output stays readable.
        self.sc._jvm.System.setProperty("spark.ui.showConsoleProgress",
                                        "false")
        log4j = self.sc._jvm.org.apache.log4j
        log4j.LogManager.getRootLogger().setLevel(log4j.Level.FATAL)

    def tearDown(self):
        self.sc.stop()
        # To avoid Akka rebinding to the same port, since it doesn't unbind
        # immediately on shutdown
        self.sc._jvm.System.clearProperty("spark.driver.port")
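PySparkTestCase is meant to be subclassed. A hypothetical test built on it might look like the following (SimpleRDDTest and its data are illustrative, not part of the source):

class SimpleRDDTest(PySparkTestCase):
    def test_count_by_value(self):
        # Uses the SparkContext created in setUp; tearDown stops it.
        rdd = self.sc.parallelize(["a", "b", "a"])
        counts = rdd.countByValue()
        self.assertEqual(counts["a"], 2)


if __name__ == "__main__":
    unittest.main()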
Example 3
    @classmethod
    def setUpClass(cls):
        """Set up a basic Spark context shared by the test class."""
        class_name = cls.__name__
        cls.sc = SparkContext(cls.getMaster(), appName=class_name)
        quiet_py4j()
Example 4
    def setUp(self):
        """Set up a basic Spark context for each test."""
        self.sc = SparkContext(self.getMaster())
        quiet_py4j()
Example 5
import unittest
from utils import add_pyspark_path, quiet_py4j

add_pyspark_path()
quiet_py4j()

from pyspark.context import SparkContext


class PySparkTestCase(unittest.TestCase):
    def setUp(self):
        class_name = self.__class__.__name__
        self.sc = SparkContext('local', class_name)
        self.sc._jvm.System.setProperty("spark.ui.showConsoleProgress", "false")
        log4j = self.sc._jvm.org.apache.log4j
        log4j.LogManager.getRootLogger().setLevel(log4j.Level.FATAL)

    def tearDown(self):
        self.sc.stop()
        # To avoid Akka rebinding to the same port, since it doesn't unbind
        # immediately on shutdown
        self.sc._jvm.System.clearProperty("spark.driver.port")
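Several examples call cls.getMaster() or self.getMaster(), a helper the excerpts never show. A minimal sketch of what such a method might look like, assuming the master URL comes from an environment variable (the SPARK_MASTER name and the SparkTestingBase class are assumptions, not from the source):

import os
import unittest


class SparkTestingBase(unittest.TestCase):
    @classmethod
    def getMaster(cls):
        # Hypothetical helper: prefer an externally supplied master URL,
        # falling back to an in-process local master for unit tests.
        return os.environ.get("SPARK_MASTER", "local[4]")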