from charmhelpers.core import hookenv
from charms import layer
from charms.reactive import set_state, remove_state
from charms.reactive.helpers import data_changed
from jujubigdata.utils import DistConfig

# The Spark and Livy helper classes come from their respective layers;
# these import paths follow the usual charms.layer convention and are
# assumed here.
from charms.layer.apache_spark import Spark
from charms.layer.livy import Livy


def start_spark(hadoop):  # pylint: disable=w0613
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')


def install_spark(hadoop):  # pylint: disable=w0613
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    if spark.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Spark')
        spark.install()
        spark.setup_spark_config()
        spark.install_demo()
        set_state('spark.installed')


def stop_spark():
    # Stop Livy first, then Spark, clearing each service's state.
    hookenv.status_set('maintenance', 'Stopping Livy REST server')
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    livy.close_ports()
    livy.stop()
    remove_state('livy.started')

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    spark.close_ports()
    spark.stop()
    remove_state('spark.started')


def reconfigure_spark(hadoop):  # pylint: disable=w0613
    config = hookenv.config()
    if not data_changed('configuration', config):
        return
    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)
    # Stop both services, apply the new config, then bring them back up.
    livy.stop()
    spark.stop()
    spark.configure()
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')
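

# --- Handler wiring (sketch) ----------------------------------------------
# In a charms.reactive layer, handlers like the ones above are gated by
# @when/@when_not decorators. A minimal sketch of how that wiring typically
# looks; the states set in this module ('spark.installed', 'spark.started',
# 'livy.started') are real, but 'hadoop.ready' and the exact gating below
# are assumptions:
#
#   @when('hadoop.ready')
#   @when_not('spark.installed')
#   def install_spark(hadoop): ...
#
#   @when('spark.installed', 'hadoop.ready')
#   @when_not('spark.started')
#   def start_spark(hadoop): ...
#
#   @when('spark.started', 'hadoop.ready')
#   def reconfigure_spark(hadoop): ...
#
#   @when('spark.started')
#   @when_not('hadoop.ready')
#   def stop_spark(): ...
#
# The unused `hadoop` argument (hence the pylint disables) is the relation
# object the framework passes to @when-gated handlers.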