Example #1
from charmhelpers.core import hookenv
from charms import layer
from charms.layer.apache_spark import Spark  # assumed module path for the Spark helper class
from charms.reactive import set_state
from jujubigdata.utils import DistConfig  # assumed import; these charms take DistConfig from jujubigdata


def install_spark(hadoop):  # pylint: disable=W0613
    # Build the distribution config from this layer's options.
    dist = DistConfig(data=layer.options("apache-spark"))
    spark = Spark(dist)
    if spark.verify_resources():
        hookenv.status_set("maintenance", "Installing Apache Spark")
        spark.install()
        spark.setup_spark_config()
        spark.install_demo()
        set_state("spark.installed")
Example #2
from charmhelpers.core import hookenv
from charms.layer.apache_spark import Spark  # assumed module path for the Spark helper class
from charms.reactive import set_state

# get_dist_config() loads the charm's dist.yaml into a DistConfig; its
# import path varies by charm, so it is assumed to be in scope here.


def install_spark(hadoop):  # pylint: disable=W0613
    dist = get_dist_config()
    spark = Spark(dist)
    if spark.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Spark')
        # This variant also creates the distribution directories and
        # installs the base packages before installing Spark itself.
        dist.add_dirs()
        dist.add_packages()
        spark.install()
        spark.setup_spark_config()
        spark.install_demo()
        set_state('spark.installed')
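In the charm source these functions run as reactive handlers, and the otherwise unused hadoop argument is the relation object that the framework passes in. A minimal sketch of that wiring, assuming the usual charms.reactive decorators and a 'hadoop.ready' state name (the state names are assumptions, not taken from the examples above):

from charms.reactive import when, when_not

@when('hadoop.ready')         # assumed state set by the Hadoop relation
@when_not('spark.installed')  # avoid re-running once installed
def install_spark(hadoop):
    ...                       # body as in the examples above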