Code example #1
File: zeppelin.py Project: apache/bigtop
def configure_spark(spark):
    master_url = spark.get_master_url()
    # Only (re)configure if our master url has changed.
    if data_changed("spark.master", master_url):
        hookenv.status_set("maintenance", "configuring spark")
        zeppelin = Zeppelin()
        zeppelin.configure_spark(master_url)
        set_state("zeppelin.spark.configured")
        update_status()
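
This handler comes from a Juju reactive charm, so the snippet omits the module-level imports and the @when decorator that actually triggers it. A minimal sketch of that surrounding boilerplate is shown below; the Zeppelin import path and the trigger flags are assumptions, not taken from the snippet:

# Sketch only: the import path for Zeppelin and the trigger flags are assumptions.
from charms.reactive import when, when_not, set_state, remove_state, is_state
from charms.reactive.helpers import data_changed
from charmhelpers.core import hookenv
from charms.layer.bigtop_zeppelin import Zeppelin  # assumed layer module


@when('zeppelin.installed', 'spark.ready')  # assumed flags: Zeppelin installed, Spark relation ready
def configure_spark(spark):
    ...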
Code example #2
File: zeppelin.py Project: apache/bigtop
def unconfigure_spark():
    hookenv.status_set("maintenance", "removing spark relation")
    zeppelin = Zeppelin()
    # Yarn / Hadoop may not actually be available, but that is the default
    # value and nothing else would reasonably work here either without Spark.
    zeppelin.configure_spark("yarn-client")
    data_changed("spark.master", "yarn-client")  # ensure updated if re-added
    remove_state("zeppelin.spark.configured")
    update_status()
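
Note that the return value of data_changed() is discarded here; the call is made only so the cached value for 'spark.master' is refreshed, which ensures a Spark relation added back later is detected as a change. A simplified sketch of roughly how that helper behaves (not the actual charms.reactive implementation, which also supports other hash algorithms):

import hashlib
import json

from charmhelpers.core import unitdata


def data_changed_sketch(data_id, data):
    # Hash the serialized value and compare it with the hash stored in the unit's kv store.
    key = 'reactive.data_changed.%s' % data_id
    new_hash = hashlib.md5(json.dumps(data, sort_keys=True).encode('utf8')).hexdigest()
    kv = unitdata.kv()
    old_hash = kv.get(key)
    kv.set(key, new_hash)  # the cached hash is refreshed on every call
    return old_hash != new_hash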
Code example #3
File: zeppelin.py Project: apache/bigtop
def unconfigure_spark():
    '''
    Remove remote Spark; reconfigure Zeppelin to use embedded Spark.
    '''
    hookenv.status_set('maintenance', 'removing spark relation')
    zeppelin = Zeppelin()

    # Zepp includes the spark-client role, so reconfigure our built-in spark
    # if our related spark has gone away.
    if is_state('zeppelin.hadoop.configured'):
        local_master = 'yarn-client'
    else:
        local_master = 'local[*]'
    zeppelin.configure_spark(local_master)
    data_changed('spark.master', local_master)  # ensure updated if re-added
    remove_state('zeppelin.spark.configured')
    update_status()
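
As with configure_spark, the decorators that make this handler fire when the related Spark goes away are not shown on this page. A plausible wiring, assuming conventional flag names (the exact @when/@when_not conditions are assumptions):

@when('zeppelin.installed', 'zeppelin.spark.configured')  # assumed flags
@when_not('spark.ready')  # the related Spark has gone away
def unconfigure_spark():
    ...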
Code example #4
File: zeppelin.py Project: apache/bigtop
def configure_spark(spark):
    '''
    Configure Zeppelin to use remote Spark resources.
    '''
    # NB: Use the master_url string if it already starts with spark://.
    # Otherwise, it means the remote spark is in local or yarn mode -- that's
    # bad because using 'local' or 'yarn' here would cause zepp's spark-submit
    # to use the builtin spark, hence ignoring the remote spark. In this case,
    # set a state so we can inform the user that the remote spark is unusable.
    master_url = spark.get_master_url()

    if master_url.startswith('spark'):
        remove_state('spark.master.unusable')
        # Only (re)configure if our master url has changed.
        if data_changed('spark.master', master_url):
            hookenv.status_set('maintenance', 'configuring spark')
            zeppelin = Zeppelin()
            zeppelin.configure_spark(master_url)
            set_state('zeppelin.spark.configured')
    else:
        remove_state('zeppelin.spark.configured')
        set_state('spark.master.unusable')
    update_status()
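
Every handler above ends with update_status(), a helper defined elsewhere in the same zeppelin.py and not shown on this page. As a rough idea of the pattern only (hypothetical, not the apache/bigtop implementation), such a helper recomputes the unit's workload status from the reactive states set above, using the same hookenv and is_state names as the snippets:

def update_status():
    # Hypothetical sketch: report readiness based on the states set by the handlers above.
    if is_state('spark.master.unusable'):
        hookenv.status_set('blocked',
                           'remote spark is in local/yarn mode and cannot be used')
    elif is_state('zeppelin.spark.configured'):
        hookenv.status_set('active', 'ready (remote spark)')
    else:
        hookenv.status_set('active', 'ready (embedded spark)')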