Beispiel #1
0
def start_livy(hadoop):  # pylint: disable=w0613
    """Configure and launch the Livy REST server, then mark the unit ready.

    Sets a maintenance status while working, configures Livy for the
    charm-configured Spark execution mode, starts the service, opens its
    ports, raises the ``livy.started`` state, and flips the unit to active.
    """
    hookenv.status_set("maintenance", "Setting up Livy REST server")
    # Build the Livy handler straight from the layer's dist config.
    server = Livy(DistConfig(data=layer.options("livy")))
    execution_mode = hookenv.config()["spark_execution_mode"]
    server.configure(execution_mode)
    server.start()
    server.open_ports()
    set_state("livy.started")
    hookenv.status_set("active", "Ready")
Beispiel #2
0
def start_livy(hadoop):  # pylint: disable=w0613
    """Bring up the Livy REST server and report the unit as ready.

    Signals maintenance status, configures Livy for the charm's Spark
    execution mode, starts it, opens its ports, sets ``livy.started``,
    and finally marks the unit active.
    """
    hookenv.status_set('maintenance', 'Setting up Livy REST server')
    dist_config = DistConfig(data=layer.options('livy'))
    server = Livy(dist_config)
    # Configure using the operator-chosen execution mode.
    server.configure(hookenv.config()['spark_execution_mode'])
    server.start()
    server.open_ports()
    set_state('livy.started')
    hookenv.status_set('active', 'Ready')
Beispiel #3
0
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Re-apply Spark and Livy configuration when charm config changes.

    No-op unless the charm configuration has actually changed since the
    last invocation (tracked via ``data_changed``). Otherwise stops both
    services, reconfigures them, and restarts them with Spark first so
    Livy comes back against a running Spark.
    """
    config = hookenv.config()
    if not data_changed("configuration", config):
        return  # nothing changed; avoid a needless service restart

    hookenv.status_set("maintenance", "Configuring Apache Spark and Livy REST server")
    spark = Spark(DistConfig(data=layer.options("apache-spark")))
    livy = Livy(DistConfig(data=layer.options("livy")))

    livy.stop()
    spark.stop()
    spark.configure()
    # Fix: reuse the config dict fetched above instead of issuing a second
    # hookenv.config() hook-tool call for the same data.
    livy.configure(config["spark_execution_mode"])
    spark.start()
    livy.start()
    hookenv.status_set("active", "Ready")
Beispiel #4
0
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Reconfigure Apache Spark and the Livy REST server on config change.

    Returns early when ``data_changed`` reports the charm configuration is
    unchanged. Otherwise stops Livy then Spark, applies configuration, and
    restarts Spark before Livy so the REST server attaches to a live Spark.
    """
    config = hookenv.config()
    if not data_changed('configuration', config):
        return  # configuration unchanged; skip the restart cycle

    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    spark = Spark(DistConfig(data=layer.options('apache-spark')))
    livy = Livy(DistConfig(data=layer.options('livy')))

    livy.stop()
    spark.stop()
    spark.configure()
    # Fix: use the already-fetched config dict rather than calling
    # hookenv.config() a second time for the same value.
    livy.configure(config['spark_execution_mode'])
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')