Example #1
def start_spark(hadoop):  # pylint: disable=unused-argument
    """Configure and start Apache Spark, open its ports, and flag it as started."""
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')
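These handlers follow the charms.reactive pattern: a function runs when its decorated states line up rather than being called directly. A minimal registration sketch for start_spark; the 'spark.installed' and 'spark.started' states appear in these examples, but 'hadoop.ready' is an assumed name for the relation state that delivers the hadoop argument:

from charms.reactive import when, when_not

@when('spark.installed', 'hadoop.ready')  # 'hadoop.ready' is an assumed state name
@when_not('spark.started')                # set below; cleared again in Example #3
def start_spark(hadoop):
    ...                                   # body as in Example #1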
Example #2
def install_spark(hadoop):  # pylint: disable=unused-argument
    """Verify resources, install Apache Spark, set up its config, and flag it as installed."""
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    if spark.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Spark')
        spark.install()
        spark.setup_spark_config()
        spark.install_demo()
        set_state('spark.installed')
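None of these snippets show their imports. A plausible import block under the usual layered-charm layout; the Spark and Livy classes live in charm-local modules, so those two paths are guesses:

from charmhelpers.core import hookenv                # status_set(), config()
from charms import layer                             # layer.options() reads layer.yaml options
from charms.reactive import set_state, remove_state  # reactive state flags
from charms.reactive.helpers import data_changed     # change detection used in Example #4
from jujubigdata.utils import DistConfig             # distribution paths/ports config
from charms.layer.apache_spark import Spark          # assumed charm-local module
from charms.layer.livy import Livy                   # assumed charm-local module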
Example #3
def stop_spark():
    """Stop the Livy REST server and Apache Spark, close their ports, and clear their states."""
    hookenv.status_set('maintenance', 'Stopping Livy REST server')
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    livy.close_ports()
    livy.stop()
    remove_state('livy.started')

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    spark.close_ports()
    spark.stop()
    remove_state('spark.started')
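stop_spark takes no relation argument, which suggests it fires when a dependency disappears rather than when one becomes ready. A speculative trigger sketch; the 'hadoop.ready' name is an assumption:

from charms.reactive import when, when_not

@when('spark.started')     # only tear down what Example #1 started
@when_not('hadoop.ready')  # assumed: the Hadoop relation has gone away
def stop_spark():
    ...                    # body as in Example #3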
Example #4
def reconfigure_spark(hadoop):  # pylint: disable=unused-argument
    """Restart Spark and Livy with fresh settings whenever the charm config changes."""
    config = hookenv.config()
    if not data_changed('configuration', config):
        return

    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    spark_dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(spark_dist)
    livy_dist = DistConfig(data=layer.options('livy'))
    livy = Livy(livy_dist)

    livy.stop()
    spark.stop()
    spark.configure()
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')
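The early return hinges on data_changed() from charms.reactive.helpers: it hashes the given value, compares the hash with the one stored under the key in the unit's database, saves the new hash, and returns True on the first call or whenever the value differs from last time. A small standalone sketch of that contract (the key and dict here are made up):

from charms.reactive.helpers import data_changed

config = {'spark_execution_mode': 'yarn-client'}
if data_changed('configuration', config):
    # First call, or the value differs from the stored hash:
    # reconfigure and restart the services as above.
    print('configuration changed')
else:
    # Hash matches the previous call; nothing to do.
    print('configuration unchanged')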