Example #1
from charmhelpers.core import hookenv      # Juju hook tools (status_set)
from charms import layer                   # layer.options() reads this layer's options
from charms.reactive import set_state
from charms.spark import Spark             # charm-local library (lib/charms/spark.py)
from jujubigdata.utils import DistConfig   # import path assumed

# 'hadoop' is injected by the reactive framework and unused here, hence W0613.
def start_spark(hadoop):  # pylint: disable=W0613
    hookenv.status_set("maintenance", "Setting up Apache Spark")
    dist = DistConfig(data=layer.options("apache-spark"))
    spark = Spark(dist)
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state("spark.started")
Example #2
# (Imports as in Example #1.)
def start_spark(hadoop):  # pylint: disable=W0613
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')
Example #3
from charmhelpers.core import hookenv
from charms.reactive import set_state
from charms.spark import Spark, get_dist_config  # charm-local lib; get_dist_config location assumed

def start_spark(hadoop):  # pylint: disable=W0613
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    spark = Spark(get_dist_config())

    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')
Example #4
from charmhelpers.core import hookenv
from charms.reactive import set_state

def start_spark(*args):
    from charms.spark import Spark  # in lib/charms; not available until after bootstrap

    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    spark = Spark()
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')
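
For context, handlers like these come from Juju reactive charms: the function is registered with charms.reactive decorators, and the framework dispatches it (injecting relation objects such as hadoop) once the required states are set. A minimal sketch of that wiring, assuming an illustrative 'hadoop.ready' state name that does not appear in the examples above:

from charms.reactive import when, when_not

@when('hadoop.ready')        # assumed state name, for illustration only
@when_not('spark.started')   # guard so the handler does not re-run
def start_spark(hadoop):     # 'hadoop' is the relation object injected by the framework
    ...                      # body as in any of the examples above

Because every example ends with set_state('spark.started'), the @when_not guard keeps the handler from firing again on later dispatch passes.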