Example #1
 def __init__(self):
     try:
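         # Put the Sparkling Water assembly jar on the classpath before touching the JVM gateway.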
         Initializer.load_sparkling_jar()
         _jvm = SparkSession._instantiatedSession.sparkContext._jvm
         self._jconf = _jvm.ai.h2o.sparkling.H2OConf()
     except:
         raise
def testSparkSubmitOptsNoJars():
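    # Check that __setUpPySparkSubmitArgs prepends the Sparkling Water jar via --jars
    # when PYSPARK_SUBMIT_ARGS does not already contain a --jars option.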
    os.environ["PYSPARK_SUBMIT_ARGS"] = "--conf spark.app.name=test"
    jar = Initializer._Initializer__get_sw_jar(None)
    Initializer._Initializer__setUpPySparkSubmitArgs()
    propEdited = os.environ["PYSPARK_SUBMIT_ARGS"]
    del os.environ["PYSPARK_SUBMIT_ARGS"]
    assert propEdited == "--jars {} --conf spark.app.name=test".format(jar)
Example #3
 def __init__(self):
     """
      This constructor is used only to initialize the environment; it does not start H2OContext.
      To start H2OContext, use one of the getOrCreate methods. This constructor is used internally by those methods.
     """
     try:
         Initializer.load_sparkling_jar()
     except:
         raise
Example #4
 def __init__(self, spark=None):
     try:
         if spark is not None:
             warnings.warn(
                 "Constructor H2OConf(spark) with spark argument is deprecated. Please use just H2OConf(). "
                 "The argument will be removed in release 3.32.")
         Initializer.load_sparkling_jar()
         self._jconf = _jvm().org.apache.spark.h2o.H2OConf()
     except:
         raise
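The deprecation warning above suggests that H2OConf is now meant to be constructed without a SparkSession. A minimal sketch of the non-deprecated form (the pysparkling import path is an assumption here; concrete option setters are omitted because they differ between releases):

from pysparkling import H2OConf  # assumed import path

# Creating the conf with no arguments loads the Sparkling Water jar via
# Initializer.load_sparkling_jar(), as the constructors above show.
conf = H2OConf()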
Example #5
 def __init__(self, spark_session):
     """
      This constructor is used only to initialize the environment; it does not start H2OContext.
      To start H2OContext, use one of the getOrCreate methods. This constructor is used internally by those methods.
     """
     try:
         self.__do_init(spark_session)
         _monkey_patch_H2OFrame(self)
         Initializer.load_sparkling_jar()
     except:
         raise
Example #6
    def __init__(self, spark):
        try:
            spark_session = spark
            if isinstance(spark, SparkContext):
                warnings.warn("Method H2OContext.getOrCreate with argument of type SparkContext is deprecated and " +
                              "parameter of type SparkSession is preferred.")
                spark_session = SparkSession.builder.getOrCreate()

            Initializer.load_sparkling_jar()
            self._do_init(spark_session)
        except:
            raise
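Per the docstrings in Example #3 and Example #5, the constructor only prepares the environment; H2OContext itself is started through getOrCreate. A minimal sketch of that call (hedged: older Sparkling Water releases take the SparkSession as an argument, newer ones take no argument or an optional H2OConf, so check the installed version):

from pyspark.sql import SparkSession
from pysparkling import H2OContext  # assumed import path

spark = SparkSession.builder.appName("sparkling-water-example").getOrCreate()

# getOrCreate runs one of the constructors shown above, which load the
# Sparkling Water jar, and then starts the H2O cluster on the Spark executors.
hc = H2OContext.getOrCreate(spark)  # newer releases: H2OContext.getOrCreate()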
Example #7
 def createFromMojo(pathToMojo, settings=H2OMOJOSettings.default()):
     # We need to make sure that Sparkling Water classes are available on the Spark driver and executor paths
     Initializer.load_sparkling_jar()
     javaModel = _jvm().ai.h2o.sparkling.ml.models.H2OMOJOModel.createFromMojo(
         pathToMojo, settings.toJavaObject())
     className = javaModel.getClass().getSimpleName()
     if className == "H2OSupervisedMOJOModel":
         return H2OSupervisedMOJOModel(javaModel)
     elif className == "H2OUnsupervisedMOJOModel":
         return H2OUnsupervisedMOJOModel(javaModel)
     else:
         return H2OMOJOModel(javaModel)
 def createFromMojo(pathToMojo, settings=H2OMOJOSettings.default()):
     # We need to make sure that Sparkling Water classes are available on the Spark driver and executor paths
     Initializer.load_sparkling_jar()
     javaModel = _jvm().ai.h2o.sparkling.ml.models.H2OMOJOPipelineModel.createFromMojo(pathToMojo,
                                                                                       settings.toJavaObject())
     return H2OMOJOPipelineModel(javaModel)
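As the comments note, createFromMojo first makes the Sparkling Water classes available on the driver and executor paths, then wraps the returned Java object in the matching Python class. A minimal usage sketch (the import path, the MOJO location, and input_df are assumptions/placeholders):

from pysparkling.ml import H2OMOJOModel  # assumed import path

# Load a previously exported MOJO; the wrapper class (supervised,
# unsupervised, or generic) is picked from the Java class name.
model = H2OMOJOModel.createFromMojo("file:///tmp/model.mojo")

# The wrapper behaves as a Spark ML transformer, so scoring is a plain transform.
predictions = model.transform(input_df)  # input_df: a Spark DataFrame placeholder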
Example #9
 def read(path):
     # We need to make sure that Sparkling Water classes are available on the Spark
     # driver and executor paths
     Initializer.load_sparkling_jar()
     javaModel = _jvm().ai.h2o.sparkling.ml.models.H2OBinaryModel.read(path)
     return H2OBinaryModel(javaModel)
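Example #9 follows the same pattern for binary H2O models. A minimal sketch (the import path and the model location are assumptions):

from pysparkling.ml import H2OBinaryModel  # assumed import path

# read() calls Initializer.load_sparkling_jar() before delegating to the JVM,
# then wraps the result in the Python H2OBinaryModel class.
binary_model = H2OBinaryModel.read("file:///tmp/binary_model")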