def reconfigure_spark():
    """React to a charm config change.

    In maintenance mode: stop Spark and, if requested, trigger an
    immediate upgrade, then bail out. Otherwise re-apply configuration,
    toggling the YARN integration to match ``spark_execution_mode``,
    and restart Spark.
    """
    config = hookenv.config()
    maintenance = config['maintenance_mode']
    if maintenance:
        remove_state('not.upgrading')
        spark = Spark(get_dist_config())
        report_status(spark)
        spark.stop()
        current_version = spark.get_current_version()
        if config['upgrade_immediately'] and config['spark_version'] != current_version:
            upgrade_spark()
        # Maintenance mode leaves Spark stopped until maintenance is over.
        return
    set_state('not.upgrading')
    # Reuse the config dict already fetched above instead of calling
    # hookenv.config() a second time.
    mode = config['spark_execution_mode']
    hookenv.status_set('maintenance', 'Configuring Apache Spark')
    spark = Spark(get_dist_config())
    spark.stop()
    if is_state('hadoop.ready') and mode.startswith('yarn') and (not is_state('yarn.configured')):
        # was in a mode other than yarn, going to yarn
        hookenv.status_set('maintenance', 'Setting up Apache Spark for YARN')
        spark.configure_yarn_mode()
        set_state('yarn.configured')
    if is_state('hadoop.ready') and (not mode.startswith('yarn')) and is_state('yarn.configured'):
        # was in a yarn mode and going to another mode
        hookenv.status_set('maintenance', 'Disconnecting Apache Spark from YARN')
        spark.disable_yarn_mode()
        remove_state('yarn.configured')
    spark.configure()
    spark.start()
    report_status(spark)
def disable_yarn():
    """Detach Spark from YARN and restart it, clearing the yarn state."""
    hookenv.status_set('maintenance', 'Disconnecting Apache Spark from YARN')
    service = Spark(get_dist_config())
    # Spark must be down while the YARN integration is torn out.
    service.stop()
    service.disable_yarn_mode()
    service.start()
    remove_state('yarn.configured')
    report_status(service)
def disable_zookeepers():
    """Drop the HA (Zookeeper) configuration and restart Spark standalone."""
    hookenv.status_set('maintenance', 'Disabling high availability')
    service = Spark(get_dist_config())
    # Stop, strip the HA settings, regenerate config, then bring it back up.
    service.stop()
    service.disable_ha()
    service.configure()
    service.start()
    remove_state('zookeeper.configured')
    report_status(service)
def configure_zookeepers(zk):
    """(Re)configure Spark HA whenever the set of zookeeper units changes.

    :param zk: zookeeper relation endpoint providing ``zookeepers()``.
    """
    zk_units = zk.zookeepers()
    # Nothing to do unless the zookeeper membership actually changed.
    if not data_changed('available.zookeepers', zk_units):
        return
    service = Spark(get_dist_config())
    hookenv.status_set('maintenance', 'Updating Apache Spark HA')
    service.stop()
    service.configure_ha(zk_units)
    service.configure()
    service.start()
    set_state('zookeeper.configured')
    report_status(service)
def update_peers_config(nodes):
    """Reconfigure Spark when the set of peer units changes.

    :param nodes: list of peer node identifiers; a sorted copy is used so
        ordering differences alone don't register as a change.
    """
    # sorted() instead of nodes.sort(): avoid mutating the caller's list
    # as a side effect; the order only matters for the data_changed key.
    nodes = sorted(nodes)
    if data_changed('available.peers', nodes):
        spark = Spark(get_dist_config())
        # We need to reconfigure spark only if the master changes or if we
        # are in HA mode and a new potential master is added
        if spark.update_peers(nodes):
            hookenv.status_set('maintenance', 'Updating Apache Spark config')
            spark.stop()
            spark.configure()
            spark.start()
            report_status(spark)
def switch_to_yarn(hadoop):
    """Enable YARN mode once Hadoop is available.

    Covers the case where the config was changed to a yarn mode before
    the plugin relation was connected.
    """
    mode = hookenv.config()['spark_execution_mode']
    # Only act when the configured execution mode is a yarn variant.
    if not mode.startswith('yarn'):
        return
    service = Spark(get_dist_config())
    hookenv.status_set('maintenance', 'Setting up Apache Spark for YARN')
    service.stop()
    service.configure_yarn_mode()
    set_state('yarn.configured')
    service.configure()
    service.start()
    report_status(service)
def reconfigure_spark():
    """Re-apply Spark configuration, switching YARN mode on or off to
    match the configured execution mode, then restart Spark."""
    mode = hookenv.config()['spark_execution_mode']
    hookenv.status_set('maintenance', 'Configuring Apache Spark')
    service = Spark(get_dist_config())
    service.stop()
    hadoop_ready = is_state('hadoop.ready')
    wants_yarn = mode.startswith('yarn')
    has_yarn = is_state('yarn.configured')
    # The two transitions are mutually exclusive (wants_yarn vs not),
    # so at most one branch fires per invocation.
    if hadoop_ready and wants_yarn and not has_yarn:
        # was in a mode other than yarn, going to yarn
        hookenv.status_set('maintenance', 'Setting up Apache Spark for YARN')
        service.configure_yarn_mode()
        set_state('yarn.configured')
    elif hadoop_ready and not wants_yarn and has_yarn:
        # was in a yarn mode and going to another mode
        hookenv.status_set('maintenance', 'Disconnecting Apache Spark from YARN')
        service.disable_yarn_mode()
        remove_state('yarn.configured')
    service.configure()
    service.start()
    report_status(service)