示例#1
0
def configure_spark(spark):
    """Point Zeppelin at the related Spark master when its URL changes."""
    url = spark.get_master_url()
    # Skip the (comparatively expensive) reconfigure when nothing changed.
    if not data_changed('spark.master', url):
        return
    hookenv.status_set('maintenance', 'configuring spark')
    zepp = Zeppelin()
    zepp.configure_spark(url)
    set_state('zeppelin.spark.configured')
    update_status()
示例#2
0
def register_notebook(client):
    """Register each pending notebook with Zeppelin and ack/nack the client."""
    zepp = Zeppelin()
    for nb in client.unregistered_notebooks():
        # Notebooks are keyed by the MD5 of their content so registration
        # is stable across repeated hook invocations.
        digest = hashlib.md5(nb.encode('utf8')).hexdigest()
        accepted = zepp.register_notebook(digest, nb)
        if accepted:
            client.accept_notebook(nb)
        else:
            client.reject_notebook(nb)
示例#3
0
文件: zeppelin.py 项目: apache/bigtop
def configure_spark(spark):
    """Sync Zeppelin's Spark interpreter with the remote master URL."""
    remote = spark.get_master_url()
    changed = data_changed("spark.master", remote)
    if changed:
        hookenv.status_set("maintenance", "configuring spark")
        Zeppelin().configure_spark(remote)
        set_state("zeppelin.spark.configured")
        update_status()
示例#4
0
文件: zeppelin.py 项目: apache/bigtop
def register_notebook(client):
    """Try to register every unregistered notebook; report each outcome."""
    zepp = Zeppelin()
    for raw in client.unregistered_notebooks():
        md5sum = hashlib.md5(raw.encode("utf8")).hexdigest()
        # Tell the client whether Zeppelin took the notebook or not.
        if zepp.register_notebook(md5sum, raw):
            client.accept_notebook(raw)
        else:
            client.reject_notebook(raw)
示例#5
0
def unconfigure_spark():
    """Fall back to the default yarn-client master when Spark departs."""
    hookenv.status_set('maintenance', 'removing spark relation')
    zepp = Zeppelin()
    # Yarn / Hadoop may not actually be available, but that is the default
    # value and nothing else would reasonably work here either without Spark.
    fallback = 'yarn-client'
    zepp.configure_spark(fallback)
    # Record the fallback so a re-added Spark registers as a change.
    data_changed('spark.master', fallback)
    remove_state('zeppelin.spark.configured')
    update_status()
示例#6
0
文件: zeppelin.py 项目: apache/bigtop
def unconfigure_spark():
    """Revert Zeppelin to yarn-client Spark after the relation is removed."""
    hookenv.status_set("maintenance", "removing spark relation")
    zeppelin = Zeppelin()
    # Yarn / Hadoop may not actually be available, but that is the default
    # value and nothing else would reasonably work here either without Spark.
    default_master = "yarn-client"
    zeppelin.configure_spark(default_master)
    # Cache the default so data_changed() fires if spark is re-related.
    data_changed("spark.master", default_master)
    remove_state("zeppelin.spark.configured")
    update_status()
示例#7
0
文件: zeppelin.py 项目: apache/bigtop
def initial_setup():
    """Install Zeppelin, open its ports, and publish the app version."""
    hookenv.status_set('maintenance', 'installing zeppelin')
    zepp = Zeppelin()
    zepp.install()
    zepp.open_ports()
    set_state('zeppelin.installed')
    update_status()
    # Surface the installed package version in `juju status`.
    version = get_package_version('zeppelin') or 'unknown'
    hookenv.application_version_set(version)
示例#8
0
文件: zeppelin.py 项目: apache/bigtop
def configure_hive(hive):
    """Reconfigure Zeppelin's Hive interpreter when the JDBC URL changes."""
    ip = hive.get_private_ip()
    port = hive.get_port()
    url = "jdbc:hive2://%s:%s" % (ip, port)
    # Only touch the interpreter config when the endpoint actually changed.
    if not data_changed("hive.connect", url):
        return
    hookenv.status_set("maintenance", "configuring hive")
    Zeppelin().configure_hive(url)
    set_state("zeppelin.hive.configured")
    update_status()
示例#9
0
文件: zeppelin.py 项目: apache/bigtop
def configure_hive(hive):
    """Wire Zeppelin to the related Hive via its JDBC connection URL."""
    endpoint = 'jdbc:hive2://%s:%s' % (hive.get_private_ip(), hive.get_port())
    changed = data_changed('hive.connect', endpoint)
    if changed:
        hookenv.status_set('maintenance', 'configuring hive')
        zepp = Zeppelin()
        zepp.configure_hive(endpoint)
        set_state('zeppelin.hive.configured')
        update_status()
示例#10
0
def configure_hive(hive):
    """Update the Hive interpreter whenever the remote JDBC URL changes."""
    host = hive.get_private_ip()
    port = hive.get_port()
    jdbc_url = 'jdbc:hive2://%s:%s' % (host, port)
    if data_changed('hive.connect', jdbc_url):
        hookenv.status_set('maintenance', 'configuring hive')
        Zeppelin().configure_hive(jdbc_url)
        set_state('zeppelin.hive.configured')
        update_status()
示例#11
0
文件: zeppelin.py 项目: apache/bigtop
def unconfigure_spark():
    '''
    Remove remote Spark; reconfigure Zeppelin to use embedded Spark.
    '''
    hookenv.status_set('maintenance', 'removing spark relation')
    zepp = Zeppelin()

    # Zeppelin ships the spark-client role, so fall back to the built-in
    # spark now that the related spark is gone; prefer yarn-client when
    # Hadoop is still wired up.
    if is_state('zeppelin.hadoop.configured'):
        fallback = 'yarn-client'
    else:
        fallback = 'local[*]'
    zepp.configure_spark(fallback)
    # Record the fallback so a re-added spark registers as a change.
    data_changed('spark.master', fallback)
    remove_state('zeppelin.spark.configured')
    update_status()
示例#12
0
def unconfigure_spark():
    '''
    Remove remote Spark; reconfigure Zeppelin to use embedded Spark.
    '''
    hookenv.status_set('maintenance', 'removing spark relation')
    zeppelin = Zeppelin()

    # Zepp includes the spark-client role, so reconfigure our built-in spark
    # if our related spark has gone away.
    if is_state('zeppelin.hadoop.configured'):
        embedded_master = 'yarn-client'
    else:
        embedded_master = 'local[*]'
    zeppelin.configure_spark(embedded_master)
    # Cache the new master so data_changed() fires if spark is re-related.
    data_changed('spark.master', embedded_master)
    remove_state('zeppelin.spark.configured')
    update_status()
示例#13
0
def check_repo_version():
    """
    Configure a bigtop site.yaml if a new version of zeppelin is available.

    This method will set unitdata if a different version of zeppelin is
    available in the newly configured bigtop repo. This unitdata allows us to
    configure site.yaml while gating the actual puppet apply. The user must do
    the puppet apply by calling the 'reinstall' action.
    """
    new_version = Bigtop().check_bigtop_repo_package('zeppelin')
    kv = unitdata.kv()
    if new_version:
        kv.set('zeppelin.version.repo', new_version)
        kv.flush(True)
        Zeppelin().trigger_bigtop()
    else:
        kv.unset('zeppelin.version.repo')
    update_status()
示例#14
0
文件: zeppelin.py 项目: apache/bigtop
def check_repo_version():
    """
    Configure a bigtop site.yaml if a new version of zeppelin is available.

    This method will set unitdata if a different version of zeppelin is
    available in the newly configured bigtop repo. This unitdata allows us to
    configure site.yaml while gating the actual puppet apply. The user must do
    the puppet apply by calling the 'reinstall' action.
    """
    available = Bigtop().check_bigtop_repo_package('zeppelin')
    if not available:
        unitdata.kv().unset('zeppelin.version.repo')
    else:
        unitdata.kv().set('zeppelin.version.repo', available)
        unitdata.kv().flush(True)
        zepp = Zeppelin()
        zepp.trigger_bigtop()
    update_status()
示例#15
0
def initial_setup():
    """Install Zeppelin, prepare its environment, and report the version."""
    hookenv.status_set('maintenance', 'installing zeppelin')
    zepp = Zeppelin()
    zepp.install()
    zepp.setup_etc_env()
    zepp.open_ports()
    set_state('zeppelin.installed')
    update_status()
    # Expose the installed package version in `juju status` output.
    pkg_version = get_package_version('zeppelin') or 'unknown'
    hookenv.application_version_set(pkg_version)
示例#16
0
文件: zeppelin.py 项目: apache/bigtop
def initial_setup():
    """Perform first-time Zeppelin installation and environment setup."""
    hookenv.status_set("maintenance", "installing zeppelin")
    zeppelin = Zeppelin()
    zeppelin.install()
    zeppelin.setup_etc_env()
    zeppelin.open_ports()
    set_state("zeppelin.installed")
    update_status()
    # set app version string for juju status output
    hookenv.application_version_set(
        get_package_version("zeppelin") or "unknown")
示例#17
0
文件: zeppelin.py 项目: apache/bigtop
def configure_spark(spark):
    '''
    Configure Zeppelin to use remote Spark resources.
    '''
    # NB: Only use the master_url if it starts with spark://. Otherwise the
    # remote spark is in local or yarn mode -- that's bad, because passing
    # 'local' or 'yarn' here would make zepp's spark-submit use the builtin
    # spark and silently ignore the remote one. Flag it as unusable instead
    # so the user can be informed.
    url = spark.get_master_url()

    if not url.startswith('spark'):
        remove_state('zeppelin.spark.configured')
        set_state('spark.master.unusable')
    else:
        remove_state('spark.master.unusable')
        # Only (re)configure when the master URL has actually changed.
        if data_changed('spark.master', url):
            hookenv.status_set('maintenance', 'configuring spark')
            Zeppelin().configure_spark(url)
            set_state('zeppelin.spark.configured')
    update_status()
示例#18
0
def configure_spark(spark):
    '''
    Configure Zeppelin to use remote Spark resources.
    '''
    # NB: Use the master url string if it already starts with spark://.
    # Otherwise, it means the remote spark is in local or yarn mode -- that's
    # bad because using 'local' or 'yarn' here would cause zepp's spark-submit
    # to use the builtin spark, hence ignoring the remote spark. In this case,
    # set a state so we can inform the user that the remote spark is unusable.
    remote_master = spark.get_master_url()
    usable = remote_master.startswith('spark')

    if usable:
        remove_state('spark.master.unusable')
        # A reconfigure is only needed when the master url changed.
        if data_changed('spark.master', remote_master):
            hookenv.status_set('maintenance', 'configuring spark')
            zepp = Zeppelin()
            zepp.configure_spark(remote_master)
            set_state('zeppelin.spark.configured')
    else:
        remove_state('zeppelin.spark.configured')
        set_state('spark.master.unusable')
    update_status()
示例#19
0
文件: zeppelin.py 项目: apache/bigtop
def unconfigure_hadoop():
    """Drop the Hadoop sample notebooks when the relation goes away."""
    Zeppelin().remove_hadoop_notebooks()
    remove_state("zeppelin.hadoop.configured")
示例#20
0
文件: zeppelin.py 项目: apache/bigtop
def unconfigure_hive():
    """Reset Zeppelin's Hive interpreter when the Hive relation departs.

    Reconfigures the interpreter with an empty JDBC endpoint and clears the
    cached connection URL so a re-added Hive -- even one with the same
    address -- is detected as a change and reconfigured.
    """
    hookenv.status_set('maintenance', 'removing hive relation')
    zeppelin = Zeppelin()
    zeppelin.configure_hive('jdbc:hive2://:')
    # Mirror unconfigure_spark: record the placeholder URL so data_changed()
    # fires again in configure_hive if the same endpoint is re-related.
    data_changed('hive.connect', 'jdbc:hive2://:')
    remove_state('zeppelin.hive.configured')
    update_status()
示例#21
0
def unconfigure_hive():
    """Tear down the Hive interpreter configuration on relation departure.

    Besides pointing the interpreter at an empty JDBC endpoint, this resets
    the data_changed cache for 'hive.connect'; otherwise re-relating the
    same Hive would be skipped by configure_hive's change check.
    """
    hookenv.status_set('maintenance', 'removing hive relation')
    zeppelin = Zeppelin()
    placeholder = 'jdbc:hive2://:'
    zeppelin.configure_hive(placeholder)
    data_changed('hive.connect', placeholder)  # ensure updated if re-added
    remove_state('zeppelin.hive.configured')
    update_status()
示例#22
0
文件: zeppelin.py 项目: apache/bigtop
def configure_hadoop(hadoop):
    """Wire Zeppelin to Hadoop and register the sample notebooks."""
    zepp = Zeppelin()
    zepp.configure_hadoop()
    zepp.register_hadoop_notebooks()
    set_state("zeppelin.hadoop.configured")
示例#23
0
文件: zeppelin.py 项目: apache/bigtop
def remove_notebook(client):
    """Remove each pending notebook from Zeppelin and confirm to the client."""
    zepp = Zeppelin()
    for raw in client.unremoved_notebooks():
        # Notebooks are keyed by the MD5 of their content.
        zepp.remove_notebook(hashlib.md5(raw.encode("utf8")).hexdigest())
        client.remove_notebook(raw)
示例#24
0
def remove_notebook(client):
    """Delete every queued notebook from Zeppelin, then notify the client."""
    zepp = Zeppelin()
    for nb in client.unremoved_notebooks():
        digest = hashlib.md5(nb.encode('utf8')).hexdigest()
        zepp.remove_notebook(digest)
        client.remove_notebook(nb)
示例#25
0
def configure_hadoop(hadoop):
    """Configure Zeppelin for Hadoop and add the bundled notebooks."""
    zeppelin = Zeppelin()
    zeppelin.configure_hadoop()
    zeppelin.register_hadoop_notebooks()
    set_state('zeppelin.hadoop.configured')
示例#26
0
def unconfigure_hadoop():
    """Remove the Hadoop notebooks and clear the configured state."""
    zepp = Zeppelin()
    zepp.remove_hadoop_notebooks()
    remove_state('zeppelin.hadoop.configured')
示例#27
0
文件: zeppelin.py 项目: apache/bigtop
def unconfigure_hive():
    """Reset the Hive interpreter when the Hive relation is removed.

    Also clears the cached 'hive.connect' value so that a Hive re-related
    at the same address is still picked up by configure_hive's
    data_changed() gate.
    """
    hookenv.status_set("maintenance", "removing hive relation")
    zeppelin = Zeppelin()
    zeppelin.configure_hive("jdbc:hive2://:")
    # Keep the change-cache in sync, matching unconfigure_spark's pattern.
    data_changed("hive.connect", "jdbc:hive2://:")  # ensure updated if re-added
    remove_state("zeppelin.hive.configured")
    update_status()