def setUpClass(cls):
    """Create the shared SparkSession and an H2OContext backed by 2 external H2O nodes."""
    spark_conf = unit_test_utils.get_default_spark_conf()
    cls._spark = SparkSession.builder.config(conf=spark_conf).getOrCreate()
    unit_test_utils.set_up_class(cls)
    # External backend: request two external H2O nodes for this test class.
    h2o_conf = H2OConf(cls._spark).set_num_of_external_h2o_nodes(2)
    cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
 def setUpClass(cls):
     """Build the default Spark conf, start the session, and attach a 1-node external H2O context."""
     spark_conf = unit_test_utils.get_default_spark_conf(cls._spark_options_from_params)
     cls._conf = spark_conf
     cls._spark = SparkSession.builder.config(conf=spark_conf).getOrCreate()
     h2o_conf = H2OConf(cls._spark).set_num_of_external_h2o_nodes(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
Example #3
0
 def setUpClass(cls):
     """Start a SparkSession from the default test conf and a size-1 H2O cluster."""
     spark_conf = unit_test_utils.get_default_spark_conf(cls._spark_options_from_params)
     cls._conf = spark_conf
     cls._spark = SparkSession.builder.config(conf=spark_conf).getOrCreate()
     h2o_conf = H2OConf(cls._spark).set_cluster_size(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
 def setUpClass(cls):
     """Start Spark, read the prostate CSV, and split it 80/20 (fixed seed 1)."""
     cls._conf = unit_test_utils.get_default_spark_conf(cls._spark_options_from_params)
     cls._spark = SparkSession.builder.config(conf=cls._conf).getOrCreate()
     data_path = "file://" + unit_test_utils.locate("smalldata/prostate/prostate.csv")
     frame = cls._spark.read.options(header='true', inferSchema='true').csv(data_path)
     # Seeded split keeps the train/test partition reproducible across runs.
     cls._trainingDataset, cls._testingDataset = frame.randomSplit([0.8, 0.2], 1)
Example #5
0
 def setUpClass(cls):
     """Start Spark with a uniquely named H2O cloud and a 2-node external backend."""
     cls._cloud_name = generic_test_utils.unique_cloud_name("h2o_conf_test")
     spark_conf = unit_test_utils.get_default_spark_conf()
     # SparkConf.set returns the conf itself, so the chained original is equivalent.
     spark_conf.set("spark.ext.h2o.cloud.name", cls._cloud_name)
     cls._spark = SparkSession.builder.config(conf=spark_conf).getOrCreate()
     unit_test_utils.set_up_class(cls)
     cls._hc = H2OContext.getOrCreate(
         cls._spark, H2OConf(cls._spark).set_num_of_external_h2o_nodes(2))
Example #6
0
 def setUpClass(cls):
     """Start Spark in yarn-client mode and create a size-1 H2O cluster."""
     yarn_conf = unit_test_utils.get_default_spark_conf().setMaster("yarn-client")
     cls._spark = SparkSession.builder.config(conf=yarn_conf).getOrCreate()
     unit_test_utils.set_up_class(cls)
     h2o_conf = H2OConf(cls._spark).set_cluster_size(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
Example #7
0
 def setUpClass(cls):
     """Configure a uniquely named H2O cloud, then start Spark and a size-1 H2O cluster."""
     cls._cloud_name = generic_test_utils.unique_cloud_name("h2o_conf_test")
     base_conf = unit_test_utils.get_default_spark_conf(cls._spark_options_from_params)
     cls._conf = base_conf.set("spark.ext.h2o.cloud.name", cls._cloud_name)
     cls._spark = SparkSession.builder.config(conf=cls._conf).getOrCreate()
     h2o_conf = H2OConf(cls._spark).set_cluster_size(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
 def setUpClass(cls):
     """Start Spark plus a size-1 H2O cluster and load the iris dataset (header + inferred schema)."""
     cls._conf = unit_test_utils.get_default_spark_conf(cls._spark_options_from_params)
     cls._spark = SparkSession.builder.config(conf=cls._conf).getOrCreate()
     h2o_conf = H2OConf(cls._spark).set_cluster_size(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
     iris_path = "file://" + unit_test_utils.locate("smalldata/iris/iris_wheader.csv")
     cls.dataset = cls._spark.read.csv(iris_path, header=True, inferSchema=True)
Example #9
0
 def setUpClass(cls):
     """Unique-cloud setup with a single external H2O node."""
     cls._cloud_name = generic_test_utils.unique_cloud_name("h2o_conf_test")
     base_conf = unit_test_utils.get_default_spark_conf(cls._spark_options_from_params)
     cls._conf = base_conf.set("spark.ext.h2o.cloud.name", cls._cloud_name)
     cls._spark = SparkSession.builder.config(conf=cls._conf).getOrCreate()
     h2o_conf = H2OConf(cls._spark).set_num_of_external_h2o_nodes(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
 def setUpClass(cls):
     """Create a unique H2O cloud name and the shared SparkSession for MOJO prediction tests.

     Note: unlike sibling fixtures, this one does not create an H2OContext here.
     """
     cls._cloud_name = generic_test_utils.unique_cloud_name("h2o_mojo_predictions_test")
     # PEP 8 (E251): no spaces around '=' in keyword arguments (was `conf = ...`).
     cls._spark = SparkSession.builder.config(conf=unit_test_utils.get_default_spark_conf()).getOrCreate()
Example #11
0
 def setUpClass(cls):
     """yarn-client SparkSession plus an external-backend H2O context with one node."""
     yarn_conf = unit_test_utils.get_default_spark_conf().setMaster("yarn-client")
     cls._spark = SparkSession.builder.config(conf=yarn_conf).getOrCreate()
     unit_test_utils.set_up_class(cls)
     h2o_conf = H2OConf(cls._spark).set_num_of_external_h2o_nodes(1)
     cls._hc = H2OContext.getOrCreate(cls._spark, h2o_conf)
 def setUpClass(cls):
     cls._conf = unit_test_utils.get_default_spark_conf(
         cls._spark_options_from_params)
     cls._spark = SparkSession.builder.config(conf=cls._conf).getOrCreate()