Example #1
def create_from_mojo(path_to_mojo):
    spark_session = SparkSession.builder.getOrCreate()
    # We need to make sure that Sparkling Water classes are available on the Spark driver and executor paths
    Initializer.load_sparkling_jar(spark_session._sc)
    return H2OMOJOModel(
        spark_session._jvm.org.apache.spark.ml.h2o.models.
        JavaH2OMOJOModelHelper.createFromMojo(path_to_mojo))
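A minimal usage sketch for the helper above, assuming an active Spark session and that the returned H2OMOJOModel behaves as a standard Spark ML transformer; the MOJO path and the input DataFrame are placeholders, not part of the original snippet.

# Hypothetical call of the helper defined above
mojo_model = create_from_mojo("/models/gbm_model.zip")    # path is an assumption
scored_df = mojo_model.transform(input_df)                # input_df: an existing Spark DataFrame
scored_df.show()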
Example #2
def __init__(self, spark_session):
    """
    This constructor is used just to initialize the environment. It does not start H2OContext.
    To start H2OContext use one of the getOrCreate methods. This constructor is internally used in those methods
    """
    try:
        self.__do_init(spark_session)
        _monkey_patch_H2OFrame(self)
        # loads sparkling water jar only if it hasn't been already loaded
        Initializer.load_sparkling_jar(self._sc)
    except:
        raise
Example #3
    def __init__(self, spark):
        try:
            spark_session = spark
            if isinstance(spark, SparkContext):
                warnings.warn("Method H2OContext.getOrCreate with argument of type SparkContext is deprecated and " +
                              "parameter of type SparkSession is preferred.")
                spark_session = SparkSession.builder.getOrCreate()

            Initializer.load_sparkling_jar(spark_session._sc)
            self._do_init(spark_session)
        except:
            raise
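The constructor above is normally reached through H2OContext.getOrCreate rather than called directly. A short sketch of both call styles, assuming pysparkling is importable in the application; the SparkContext form is the deprecated one that triggers the warning shown above.

from pyspark.sql import SparkSession
from pysparkling import H2OContext

spark = SparkSession.builder.appName("sparkling-water-example").getOrCreate()
hc = H2OContext.getOrCreate(spark)                    # preferred: pass the SparkSession
# hc = H2OContext.getOrCreate(spark.sparkContext)     # deprecated: emits the warning above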
Example #4
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from pysparkling.ml.algos import *
from pysparkling.ml.algos.classification import *
from pysparkling.ml.algos.regression import *
from pysparkling.ml.features import *
from pysparkling.ml.models import *

__all__ = ["ColumnPruner", "H2OGBM", "H2ODeepLearning", "H2OAutoML", "H2OXGBoost", "H2OGLM", "H2OCoxPH", "H2OGAM", "H2OMOJOModel",
           "H2OSupervisedMOJOModel", "H2OTreeBasedSupervisedMOJOModel", "H2OUnsupervisedMOJOModel", "H2OCoxPHMOJOModel",
           "H2OTreeBasedUnsupervisedMOJOModel", "H2OMOJOPipelineModel", "H2OGridSearch", "H2OMOJOSettings", "H2OKMeans",
           "H2OTargetEncoder", "H2ODRF", "H2OAutoMLClassifier", "H2OGLMClassifier", "H2OGAMClassifier", "H2OGBMClassifier", "H2OXGBoostClassifier",
           "H2ODeepLearningClassifier", "H2ODRFClassifier", "H2OAutoMLRegressor", "H2OGLMRegressor", "H2OGBMRegressor",
           "H2OGAMRegressor", "H2OXGBoostRegressor", "H2ODeepLearningRegressor", "H2ODRFRegressor", "H2OBinaryModel",
           "H2OIsolationForest", "H2OKMeansMOJOModel", "H2OGLMMOJOModel", "H2OGAMMOJOModel", "H2OGBMMOJOModel", "H2OXGBoostMOJOModel",
           "H2ODeepLearningMOJOModel", "H2ODRFMOJOModel", "H2OIsolationForestMOJOModel", "H2OWord2Vec"]

from pysparkling.initializer import Initializer

Initializer.load_sparkling_jar()
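Because Initializer.load_sparkling_jar() runs at module import time here, importing pysparkling.ml is enough to attach the Sparkling Water JAR to the running Spark application. A minimal sketch under that assumption; the estimator parameter name is an assumption as well.

from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()

from pysparkling.ml import H2OGBM     # the import itself calls Initializer.load_sparkling_jar()
gbm = H2OGBM(labelCol="label")        # labelCol is an assumed parameter name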
Example #5
def stop(self):
    warnings.warn(
        "Stopping H2OContext. (Restarting H2O is not yet fully supported...) "
    )
    Initializer.clean_temp_dir()
    self._jhc.stop(False)
Example #6
def stop_with_jvm(self):
    Initializer.clean_temp_dir()
    h2o.cluster().shutdown()
    self.stop()
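A hedged sketch of how these shutdown methods are typically used on an existing H2OContext; which one to call depends on whether the backing H2O cluster should be shut down as well.

hc = H2OContext.getOrCreate(spark)    # assuming `spark` is an active SparkSession
# ... train and score ...
hc.stop()             # stops the H2OContext only (emits the warning shown above)
# hc.stop_with_jvm()  # alternatively, also shuts down the H2O cluster via h2o.cluster().shutdown()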
Example #7
# Reconstructed first line of this truncated snippet: the Spark major.minor version targeted by PySparkling
pysparkling_spark_major = pysparkling_spark_version[0] + "." + pysparkling_spark_version[1]


def custom_formatwarning(msg, *args, **kwargs):
    # ignore everything except the message
    return str(msg) + '\n'


warnings.formatwarning = custom_formatwarning

if not (pyspark_major == pysparkling_spark_major):
    warnings.warn("""
    You are using PySparkling for Spark {pysparkling_spark_major}, but your PySpark is of
    version {pyspark_major}. Please make sure Spark and PySparkling versions are compatible. """
                  .format(pysparkling_spark_major=pysparkling_spark_major,
                          pyspark_major=pyspark_major))

# set imports from this project which will be available when the module is imported
from pysparkling.context import H2OContext
from pysparkling.conf import H2OConf
from pysparkling.initializer import Initializer

Initializer.check_different_h2o()
# set what is meant by * packages in statement from foo import *
__all__ = ["H2OContext", "H2OConf"]

# Load sparkling water jar only if Spark is already running
sc = Initializer.active_spark_context()
if sc is not None:
    Initializer.load_sparkling_jar(sc)
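For illustration only: once warnings.formatwarning is replaced with custom_formatwarning as above, a warning prints as the bare message plus a newline, without the usual file, line, and category prefix.

import warnings
warnings.warn("Spark and PySparkling versions may be incompatible")
# with custom_formatwarning installed, this prints just:
# Spark and PySparkling versions may be incompatible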
Example #8
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import warnings

from pysparkling.initializer import Initializer


def custom_formatwarning(msg, *args, **kwargs):
    # ignore everything except the message
    return str(msg) + '\n'


warnings.formatwarning = custom_formatwarning

__version__ = Initializer.getVersion()

Initializer.load_sparkling_jar()
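Given the module-level code above, importing the package both sets __version__ and loads the Sparkling Water JAR as a side effect. A minimal sketch, assuming this is the top-level pysparkling package; the printed version string is only a placeholder.

import pysparkling                  # runs Initializer.load_sparkling_jar() on import
print(pysparkling.__version__)      # e.g. "3.36.1.3-1-3.2"; the exact value depends on the installed build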
Example #9
def __init__(self, spark_context):
    try:
        Initializer.load_sparkling_jar(spark_context)
        self._do_init(spark_context)
    except:
        raise
Example #10
# encoding: utf-8
# module pySparkling
# from (pysparkling)
"""
pySparkling - The Sparkling-Water Python Package
=====================
"""

from .features import ColumnPruner
from .algos import H2OGBM, H2ODeepLearning, H2OAutoML, H2OXGBoost, H2OGLM, H2OGridSearch
from .models import H2OMOJOModel, H2OMOJOPipelineModel
# set what is meant by * packages in statement from foo import *
__all__ = ["ColumnPruner", "H2OGBM", "H2ODeepLearning", "H2OAutoML", "H2OXGBoost", "H2OGLM", "H2OMOJOModel", "H2OMOJOPipelineModel", "H2OGridSearch"]

from pysparkling.initializer import Initializer

# Load sparkling water jar only if Spark is already running
sc = Initializer.active_spark_context()
if sc is not None:
    Initializer.load_sparkling_jar(sc)
Example #11
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import pyspark
import warnings
import re

from pysparkling.initializer import Initializer
from pysparkling.initializer import VersionComponents

__version__ = Initializer.getVersion()
pySparklingVersionComponents = VersionComponents.parseFromVersion(__version__)
pySparkMinorVersion = re.search(r"^(\d+\.\d+)\.\d+$",
                                pyspark.__version__).group(1)


def custom_formatwarning(msg, *args, **kwargs):
    # ignore everything except the message
    return str(msg) + '\n'


warnings.formatwarning = custom_formatwarning

if not (pySparkMinorVersion == pySparklingVersionComponents.sparkVersion):
    warnings.warn("""
    You are using PySparkling for Spark {}, but your PySpark is of version {}.
    Please make sure Spark and PySparkling versions are compatible. """
                  .format(pySparklingVersionComponents.sparkVersion, pySparkMinorVersion))
    # closing of the truncated snippet reconstructed by analogy with the other version-check examples above
Example #12
    with zipfile.ZipFile(path.dirname(here), 'r') as archive:
        __version__ = archive.read('pysparkling/version.txt').decode('utf-8').strip()
else:
    with open(path.join(here, 'version.txt'), encoding='utf-8') as f:
        __version__ = f.read().strip()

pyspark_version = pyspark.__version__.split(".")
pysparkling_version = __version__.split(".")

def custom_formatwarning(msg, *args, **kwargs):
    # ignore everything except the message
    return str(msg) + '\n'

warnings.formatwarning = custom_formatwarning


if not (pyspark_version[0] == pysparkling_version[0] and pyspark_version[1] == pysparkling_version[1]):
    warnings.warn("""
    You are using PySparkling of version {pysparkling_version}, but your PySpark is of
    version {pyspark_version}. Please make sure Spark and PySparkling versions are compatible. """.format(pysparkling_version=__version__, pyspark_version=pyspark.__version__))


# set imports from this project which will be available when the module is imported
from pysparkling.context import H2OContext
from pysparkling.conf import H2OConf
from pysparkling.initializer import Initializer

Initializer.check_different_h2o()
# set what is meant by * packages in statement from foo import *
__all__ = ["H2OContext", "H2OConf"]
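A worked illustration of the compatibility check above with hypothetical version strings; only the first two components (the Spark major and minor version) need to match for the warning to be skipped.

pyspark_version = "2.4.5".split(".")        # hypothetical -> ["2", "4", "5"]
pysparkling_version = "2.4.10".split(".")   # hypothetical -> ["2", "4", "10"]
same_major_minor = (pyspark_version[0] == pysparkling_version[0]
                    and pyspark_version[1] == pysparkling_version[1])
print(same_major_minor)                     # True, so no compatibility warning is emitted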
Example #13
    with open(path.join(here, 'version.txt'), encoding='utf-8') as f:
        __version__ = f.read().strip()

pyspark_version = pyspark.__version__.split(".")
pysparkling_version = __version__.split(".")


def custom_formatwarning(msg, *args, **kwargs):
    # ignore everything except the message
    return str(msg) + '\n'


warnings.formatwarning = custom_formatwarning

if not (pyspark_version[0] == pysparkling_version[0]
        and pyspark_version[1] == pysparkling_version[1]):
    warnings.warn("""
    You are using PySparkling of version {pysparkling_version}, but your PySpark is of
    version {pyspark_version}. Please make sure Spark and PySparkling versions are compatible. """
                  .format(pysparkling_version=__version__,
                          pyspark_version=pyspark.__version__))

# set imports from this project which will be available when the module is imported
from pysparkling.context import H2OContext
from pysparkling.conf import H2OConf
from pysparkling.initializer import Initializer

Initializer.check_different_h2o()
# set what is meant by * packages in statement from foo import *
__all__ = ["H2OContext", "H2OConf"]