Example #1
0
def init_spark_on_local(cores=2,
                        conf=None,
                        python_location=None,
                        spark_log_level="WARN",
                        redirect_spark_log=True):
    """
    Create a SparkContext with Analytics Zoo configurations on the local machine.

    :param cores: The number of cores for Spark local. Default to be 2. You can also set it to "*"
           to use all the available cores. i.e `init_spark_on_local(cores="*")`
    :param conf: You can append extra conf for Spark in key-value format.
           i.e conf={"spark.executor.extraJavaOptions": "-XX:+PrintGCDetails"}.
           Default to be None.
    :param python_location: The path to your running Python executable. If not specified, the
           default Python interpreter in effect would be used.
    :param spark_log_level: The log level for Spark. Default to be 'WARN'.
    :param redirect_spark_log: Whether to redirect the Spark log to local file. Default to be True.

    :return: An instance of SparkContext.
    """
    # Imported lazily so that Spark is only pulled in when actually initializing it.
    from zoo.util.spark import SparkRunner

    spark_runner = SparkRunner(
        spark_log_level=spark_log_level,
        redirect_spark_log=redirect_spark_log,
    )
    # Ensure PYTHONHOME is set before launching Spark locally.
    set_python_home()
    return spark_runner.init_spark_on_local(
        cores=cores,
        conf=conf,
        python_location=python_location,
    )
Example #2
0
def init_spark_on_local(cores=2, conf=None, python_location=None, spark_log_level="WARN",
                        redirect_spark_log=True):
    """
    Create a SparkContext with Analytics Zoo configurations on the local machine.

    :param cores: The number of cores for Spark local. Default to be 2. You can also set it
           to "*" to use all the available cores. i.e `init_spark_on_local(cores="*")`
    :param conf: You can append extra conf for Spark in key-value format.
           i.e conf={"spark.executor.extraJavaOptions": "-XX:+PrintGCDetails"}.
           Default to be None.
    :param python_location: The path to your running Python executable. If not specified, the
           default Python interpreter in effect would be used.
    :param spark_log_level: The log level for Spark. Default to be 'WARN'.
    :param redirect_spark_log: Whether to redirect the Spark log to local file. Default to be True.

    :return: An instance of SparkContext.
    """
    # Imported lazily so that Spark is only pulled in when actually initializing it.
    from zoo.util.spark import SparkRunner
    runner = SparkRunner(spark_log_level=spark_log_level,
                         redirect_spark_log=redirect_spark_log)
    # Ensure PYTHONHOME is set before launching Spark locally.
    set_python_home()
    return runner.init_spark_on_local(cores=cores, conf=conf,
                                      python_location=python_location)
Example #3
0
def init_spark_on_local(cores=2, conf=None, python_location=None, spark_log_level="WARN",
                        redirect_spark_log=True):
    """
    Create a SparkContext with Zoo configuration on the local machine.

    :param cores: Number of cores for Spark local; default 2. Pass "*" to use
           all available cores, i.e. `init_spark_on_local(cores="*")`.
    :param conf: A key-value dictionary appended to SparkConf.
    :param python_location: Path to the running Python executable.
    :param spark_log_level: Log level of Spark.
    :param redirect_spark_log: Whether to redirect the Spark log to a local file.
    :return: A SparkContext.
    """
    # Imported lazily so that Spark is only pulled in when actually initializing it.
    from zoo.util.spark import SparkRunner

    zoo_runner = SparkRunner(
        spark_log_level=spark_log_level,
        redirect_spark_log=redirect_spark_log,
    )
    return zoo_runner.init_spark_on_local(
        cores=cores,
        conf=conf,
        python_location=python_location,
    )