def start_livy(hadoop):  # pylint: disable=w0613
    """Configure and start the Livy REST server.

    Runs once Hadoop is ready (the ``hadoop`` relation argument is unused
    beyond gating the handler, hence the pylint disable). Reads the
    desired Spark execution mode from charm config, configures and starts
    Livy, opens its ports, and flips the unit to active.
    """
    hookenv.status_set("maintenance", "Setting up Livy REST server")
    # Build Livy's distribution layout from the layer options.
    dist = DistConfig(data=layer.options("livy"))
    livy = Livy(dist)
    mode = hookenv.config()["spark_execution_mode"]
    livy.configure(mode)
    livy.start()
    livy.open_ports()
    # Mark the reactive state so dependent handlers can fire.
    set_state("livy.started")
    hookenv.status_set("active", "Ready")
def start_livy(hadoop):  # pylint: disable=w0613
    """Configure and start the Livy REST server.

    NOTE(review): this is a duplicate of the double-quoted variant above
    and, being defined later, shadows it at import time — confirm which
    copy is intended to survive.
    """
    hookenv.status_set('maintenance', 'Setting up Livy REST server')
    # Build Livy's distribution layout from the layer options.
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    mode = hookenv.config()['spark_execution_mode']
    livy.configure(mode)
    livy.start()
    livy.open_ports()
    # Mark the reactive state so dependent handlers can fire.
    set_state('livy.started')
    hookenv.status_set('active', 'Ready')
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Re-apply Spark and Livy configuration when charm config changes.

    No-ops unless the charm configuration actually changed (tracked via
    ``data_changed``). Otherwise stops both services, reconfigures them,
    and restarts them in dependency order (Spark before Livy).
    """
    config = hookenv.config()
    if not data_changed("configuration", config):
        return
    hookenv.status_set("maintenance",
                       "Configuring Apache Spark and Livy REST server")
    dist = DistConfig(data=layer.options("apache-spark"))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options("livy"))
    livy = Livy(dist)
    # Stop Livy first: it fronts Spark, so take the dependent down first.
    livy.stop()
    spark.stop()
    spark.configure()
    # Reuse the config dict fetched above instead of re-querying hookenv.
    mode = config["spark_execution_mode"]
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set("active", "Ready")
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Re-apply Spark and Livy configuration when charm config changes.

    NOTE(review): duplicate of the double-quoted variant above; being
    defined later it shadows that copy — confirm which one should remain.
    """
    config = hookenv.config()
    if not data_changed('configuration', config):
        return
    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    # Stop Livy first: it fronts Spark, so take the dependent down first.
    livy.stop()
    spark.stop()
    spark.configure()
    # Reuse the config dict fetched above instead of re-querying hookenv.
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')