Example #1
def install_livy(hadoop):  # pylint: disable=w0613
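    # Check that the Livy software resources are available before installing;
    # flag completion with the 'livy.installed' reactive state.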
    dist = DistConfig(data=layer.options("livy"))
    livy = Livy(dist)
    if livy.verify_resources():
        hookenv.status_set("maintenance", "Installing Livy REST server")
        livy.install()
        set_state("livy.installed")
Example #2
def stop_spark():
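    # Tear down in dependency order: stop the Livy REST server first, then
    # Spark itself, closing ports and clearing the matching reactive states.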
    hookenv.status_set("maintenance", "Stopping Livy REST server")
    dist = DistConfig(data=layer.options("livy"))
    livy = Livy(dist)
    livy.close_ports()
    livy.stop()
    remove_state("livy.started")

    hookenv.status_set("maintenance", "Stopping Apache Spark")
    dist = DistConfig(data=layer.options("apache-spark"))
    spark = Spark(dist)
    spark.close_ports()
    spark.stop()
    remove_state("spark.started")
Example #3
def start_livy(hadoop):  # pylint: disable=w0613
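    # Configure Livy for the spark_execution_mode set in the charm config,
    # start the service, open its ports, and report the unit as ready.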
    hookenv.status_set('maintenance', 'Setting up Livy REST server')
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    mode = hookenv.config()['spark_execution_mode']
    livy.configure(mode)
    livy.start()
    livy.open_ports()
    set_state('livy.started')
    hookenv.status_set('active', 'Ready')
Example #4
def reconfigure_spark(hadoop):  # pylint: disable=w0613
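    # Skip the restart cycle unless the charm configuration actually changed;
    # otherwise stop both services, re-apply configuration, and start them again.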
    config = hookenv.config()
    if not data_changed('configuration', config):
        return

    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)

    livy.stop()
    spark.stop()
    spark.configure()
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')
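
These handlers come from a Juju reactive charm and depend on names imported elsewhere in the module; the snippets also omit the @when/@when_not decorators that would normally gate them. A minimal import sketch consistent with the calls above follows; the charmhelpers and charms.reactive imports are standard, while the module paths for DistConfig, Livy, and Spark are assumptions rather than something the examples state.

# Sketch of the imports the examples assume. The jujubigdata and
# charms.layer.* paths are assumptions typical of big-data layers,
# not taken from the examples themselves.
from charmhelpers.core import hookenv
from charms import layer
from charms.reactive import set_state, remove_state
from charms.reactive.helpers import data_changed
from jujubigdata.utils import DistConfig        # assumed location of DistConfig
from charms.layer.apache_spark import Spark     # assumed charm-local helper class
from charms.layer.apache_livy import Livy       # assumed charm-local helper class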