def stop_spark():
    """Shut down the Livy REST server and then Apache Spark.

    For each service: announce a maintenance status, close its published
    ports, stop the service, and clear its reactive ``*.started`` state.
    Livy is torn down first since it depends on Spark.
    """
    hookenv.status_set("maintenance", "Stopping Livy REST server")
    # Build the Livy service handle from its layer options.
    livy = Livy(DistConfig(data=layer.options("livy")))
    livy.close_ports()
    livy.stop()
    remove_state("livy.started")

    hookenv.status_set("maintenance", "Stopping Apache Spark")
    # Build the Spark service handle from its layer options.
    spark = Spark(DistConfig(data=layer.options("apache-spark")))
    spark.close_ports()
    spark.stop()
    remove_state("spark.started")
def stop_spark():
    """Stop Livy, then Spark, closing ports and clearing reactive states.

    The teardown order matters: the Livy REST server is stopped before the
    Spark service it fronts. Each phase sets a maintenance status so the
    operator can follow progress.
    """
    hookenv.status_set('maintenance', 'Stopping Livy REST server')
    livy_dist = DistConfig(data=layer.options('livy'))
    livy_service = Livy(livy_dist)
    livy_service.close_ports()
    livy_service.stop()
    remove_state('livy.started')

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    spark_dist = DistConfig(data=layer.options('apache-spark'))
    spark_service = Spark(spark_dist)
    spark_service.close_ports()
    spark_service.stop()
    remove_state('spark.started')
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Reapply Spark and Livy configuration when charm config changes.

    :param hadoop: Hadoop relation endpoint (unused; present because this
        handler is triggered by the hadoop relation — hence the pylint
        disable for the unused argument).

    If the charm configuration has not actually changed since the last
    run, this is a no-op. Otherwise both services are stopped,
    reconfigured, and restarted, and the unit is marked active.
    """
    config = hookenv.config()
    # Avoid a needless stop/reconfigure/start cycle when nothing changed.
    if not data_changed("configuration", config):
        return
    hookenv.status_set("maintenance", "Configuring Apache Spark and Livy REST server")
    spark = Spark(DistConfig(data=layer.options("apache-spark")))
    livy = Livy(DistConfig(data=layer.options("livy")))
    # Stop Livy before Spark; restart in the reverse order below.
    livy.stop()
    spark.stop()
    spark.configure()
    # Fix: reuse the config dict fetched above instead of calling
    # hookenv.config() a second time (redundant round-trip, and could in
    # principle observe a different value than the one data_changed saw).
    mode = config["spark_execution_mode"]
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set("active", "Ready")
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Reconfigure and restart Spark and Livy on a config change.

    :param hadoop: Hadoop relation endpoint (unused by the handler body;
        kept for the reactive dispatch signature — see the pylint disable).

    Returns early when ``data_changed`` reports the configuration is
    unchanged; otherwise stops both services, pushes the new
    configuration (including ``spark_execution_mode`` to Livy), restarts
    them, and sets the unit status to active.
    """
    config = hookenv.config()
    # Short-circuit: nothing to do if the configuration is unchanged.
    if not data_changed('configuration', config):
        return
    hookenv.status_set('maintenance', 'Configuring Apache Spark and Livy REST server')
    spark = Spark(DistConfig(data=layer.options('apache-spark')))
    livy = Livy(DistConfig(data=layer.options('livy')))
    # Teardown order: Livy first (it fronts Spark); startup reverses this.
    livy.stop()
    spark.stop()
    spark.configure()
    # Fix: read the mode from the config already fetched above rather
    # than issuing a second, redundant hookenv.config() call.
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')