# Reactive handlers for the Livy REST server layer of the Apache Spark charm.
# NOTE: the import paths for the layer libraries (Spark, Livy) are assumed
# from the conventional Juju big-data layer layout, not from this excerpt.
from charmhelpers.core import hookenv
from charms import layer
from charms.reactive import set_state, remove_state
from charms.reactive.helpers import data_changed
from jujubigdata.utils import DistConfig

from charms.layer.apache_spark import Spark
from charms.layer.livy import Livy


def install_livy(hadoop):  # pylint: disable=w0613
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    # Only install once the resources (Livy tarball, etc.) are verified.
    if livy.verify_resources():
        hookenv.status_set('maintenance', 'Installing Livy REST server')
        livy.install()
        set_state('livy.installed')
def start_livy(hadoop):  # pylint: disable=w0613
    hookenv.status_set('maintenance', 'Setting up Livy REST server')
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    # Configure Livy for the execution mode chosen in the charm config
    # (e.g. local or yarn-client), then start it and expose its ports.
    mode = hookenv.config()['spark_execution_mode']
    livy.configure(mode)
    livy.start()
    livy.open_ports()
    set_state('livy.started')
    hookenv.status_set('active', 'Ready')
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    config = hookenv.config()
    # Skip the stop/reconfigure/start cycle if the charm config is unchanged.
    if not data_changed('configuration', config):
        return
    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    # Stop both services, apply the new configuration, then bring them
    # back up (Spark first, since Livy submits jobs through it).
    livy.stop()
    spark.stop()
    spark.configure()
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')
def stop_spark():
    # Tear down Livy first, then Spark, closing their ports and clearing
    # the reactive states set by the start handlers.
    hookenv.status_set('maintenance', 'Stopping Livy REST server')
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    livy.close_ports()
    livy.stop()
    remove_state('livy.started')
    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    spark.close_ports()
    spark.stop()
    remove_state('spark.started')
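# A minimal sketch of how handlers like these are typically registered in a
# charms.reactive layer. The decorators are not part of this excerpt, and the
# state names below ('hadoop.ready', etc.) are assumptions for illustration
# only; the real names come from the layer's own states:
#
#   @when('hadoop.ready')
#   @when_not('livy.installed')
#   def install_livy(hadoop): ...
#
#   @when('hadoop.ready', 'livy.installed')
#   @when_not('livy.started')
#   def start_livy(hadoop): ...
#
#   @when('hadoop.ready', 'livy.started')
#   def reconfigure_spark(hadoop): ...
#
#   @hook('stop')
#   def stop_spark(): ...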