def stop_spark():
    """Stop the Apache Spark service and withdraw its published ports."""
    # Deferred import: charms.spark lives in lib/charms and is only
    # importable after bootstrap.
    from charms.spark import Spark

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    service = Spark()
    service.close_ports()
    service.stop()
    # NOTE(review): unlike the DistConfig-based stop_spark variants, this one
    # never calls remove_state('spark.started') — confirm that is intentional.
def install_spark():
    """Install Apache Spark when its resources verify; set 'spark.installed'."""
    # Deferred import: charms.spark lives in lib/charms and is only
    # importable after bootstrap.
    from charms.spark import Spark

    installer = Spark()
    if not installer.verify_resources():
        # Resources are not (yet) available; leave state untouched.
        return
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    installer.install()
    set_state('spark.installed')
def install_spark(hadoop):  # pylint: disable=w0613
    """Install and configure Apache Spark, then flag 'spark.installed'.

    The *hadoop* argument is the relation that triggered this handler; it
    is unused here (hence the pylint suppression).
    """
    cfg = DistConfig(data=layer.options('apache-spark'))
    service = Spark(cfg)
    if not service.verify_resources():
        # Required resources missing; do nothing this invocation.
        return
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    service.install()
    service.setup_spark_config()
    service.install_demo()
    set_state('spark.installed')
def stop_spark():
    """Stop Livy first, then Spark, clearing both 'started' states."""
    hookenv.status_set("maintenance", "Stopping Livy REST server")
    livy = Livy(DistConfig(data=layer.options("livy")))
    livy.close_ports()
    livy.stop()
    remove_state("livy.started")

    hookenv.status_set("maintenance", "Stopping Apache Spark")
    spark = Spark(DistConfig(data=layer.options("apache-spark")))
    spark.close_ports()
    spark.stop()
    remove_state("spark.started")
def start_spark(hadoop):  # pylint: disable=w0613
    """Configure, start, and expose Apache Spark; set 'spark.started'.

    *hadoop* is the triggering relation and is not used here.
    """
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    service = Spark(DistConfig(data=layer.options('apache-spark')))
    service.configure()
    service.start()
    service.open_ports()
    set_state('spark.started')
def install_spark(hadoop):  # pylint: disable=w0613
    """Install, configure, and demo-seed Apache Spark; set 'spark.installed'.

    *hadoop* is the triggering relation and is not used here.
    """
    spark = Spark(DistConfig(data=layer.options("apache-spark")))
    if spark.verify_resources():
        hookenv.status_set("maintenance", "Installing Apache Spark")
        # Run the installation steps in their required order.
        for step in (spark.install, spark.setup_spark_config, spark.install_demo):
            step()
        set_state("spark.installed")
def start_spark(hadoop):  # pylint: disable=w0613
    """Bring Apache Spark up (configure, start, open ports); set 'spark.started'."""
    hookenv.status_set("maintenance", "Setting up Apache Spark")
    dist_cfg = DistConfig(data=layer.options("apache-spark"))
    svc = Spark(dist_cfg)
    svc.configure()
    svc.start()
    svc.open_ports()
    set_state("spark.started")
def install_spark(hadoop):
    """Prepare dirs/packages and install Apache Spark; set 'spark.installed'.

    *hadoop* is the triggering relation and is not used here.
    """
    dist = get_dist_config()
    spark = Spark(dist)
    if not spark.verify_resources():
        # Required resources missing; skip installation entirely.
        return
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    # Filesystem layout and OS packages come from the dist config first.
    dist.add_dirs()
    dist.add_packages()
    spark.install()
    spark.setup_spark_config()
    spark.install_demo()
    set_state('spark.installed')
def start_spark(hadoop):
    """Start Apache Spark and report the unit active.

    *hadoop* is the triggering relation and is not used here.
    """
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    service = Spark(get_dist_config())
    service.configure()
    service.start()
    service.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')
def start_spark(*args):
    """Configure and start Apache Spark, then report the unit active.

    Extra positional args (relation objects from the dispatcher) are ignored.
    """
    # Deferred import: charms.spark lives in lib/charms and is only
    # importable after bootstrap.
    from charms.spark import Spark

    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    svc = Spark()
    svc.configure()
    svc.start()
    svc.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Re-apply Spark and Livy configuration when charm config has changed.

    No-op unless hookenv.config() differs from the previously recorded
    'configuration' snapshot. Both services are stopped, reconfigured,
    and restarted; the unit is left 'active' on success.

    *hadoop* is the triggering relation and is not used here.
    """
    config = hookenv.config()
    if not data_changed('configuration', config):
        return
    hookenv.status_set('maintenance', 'Configuring Apache Spark and Livy REST server')
    spark = Spark(DistConfig(data=layer.options('apache-spark')))
    livy = Livy(DistConfig(data=layer.options('livy')))
    # Stop Livy before Spark; bring them back up in the reverse order.
    livy.stop()
    spark.stop()
    spark.configure()
    # Fix: reuse the config dict fetched above instead of making a second,
    # redundant hookenv.config() call.
    livy.configure(config['spark_execution_mode'])
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')
def stop_spark():
    """Shut down Livy, then Spark, removing both 'started' states."""
    hookenv.status_set('maintenance', 'Stopping Livy REST server')
    livy_cfg = DistConfig(data=layer.options('livy'))
    livy = Livy(livy_cfg)
    livy.close_ports()
    livy.stop()
    remove_state('livy.started')

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    spark_cfg = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(spark_cfg)
    spark.close_ports()
    spark.stop()
    remove_state('spark.started')
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Re-apply Spark and Livy configuration when charm config has changed.

    No-op unless hookenv.config() differs from the previously recorded
    'configuration' snapshot. Both services are stopped, reconfigured,
    and restarted; the unit is left 'active' on success.

    *hadoop* is the triggering relation and is not used here.
    """
    config = hookenv.config()
    if not data_changed("configuration", config):
        return
    hookenv.status_set("maintenance", "Configuring Apache Spark and Livy REST server")
    spark = Spark(DistConfig(data=layer.options("apache-spark")))
    livy = Livy(DistConfig(data=layer.options("livy")))
    # Stop Livy before Spark; bring them back up in the reverse order.
    livy.stop()
    spark.stop()
    spark.configure()
    # Fix: reuse the config dict fetched above instead of making a second,
    # redundant hookenv.config() call.
    livy.configure(config["spark_execution_mode"])
    spark.start()
    livy.start()
    hookenv.status_set("active", "Ready")
def stop_spark():
    """Stop Apache Spark, close its ports, and clear 'spark.started'."""
    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    service = Spark(get_dist_config())
    service.close_ports()
    service.stop()
    remove_state('spark.started')