Пример #1
0
def stop_spark():
    """Close Apache Spark's ports and stop the service."""
    from charms.spark import Spark  # in lib/charms; not available until after bootstrap

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    service = Spark()
    service.close_ports()
    service.stop()
Пример #2
0
def install_spark():
    """Install Apache Spark once its resources check out."""
    from charms.spark import Spark  # in lib/charms; not available until after bootstrap

    service = Spark()
    # Guard clause: bail out early if the resources are not available.
    if not service.verify_resources():
        return
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    service.install()
    set_state('spark.installed')
Пример #3
0
def install_spark(hadoop):  # pylint: disable=w0613
    """Install Spark, lay down its configuration, and ship the demo job."""
    layout = DistConfig(data=layer.options('apache-spark'))
    service = Spark(layout)
    # Only proceed when the required resources are present.
    if not service.verify_resources():
        return
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    service.install()
    service.setup_spark_config()
    service.install_demo()
    set_state('spark.installed')
Пример #4
0
def stop_spark():
    """Shut down Livy first, then Spark, clearing their started states."""
    hookenv.status_set("maintenance", "Stopping Livy REST server")
    livy_service = Livy(DistConfig(data=layer.options("livy")))
    livy_service.close_ports()
    livy_service.stop()
    remove_state("livy.started")

    hookenv.status_set("maintenance", "Stopping Apache Spark")
    spark_service = Spark(DistConfig(data=layer.options("apache-spark")))
    spark_service.close_ports()
    spark_service.stop()
    remove_state("spark.started")
Пример #5
0
def start_spark(hadoop):  # pylint: disable=w0613
    """Configure Spark, start it, and expose its ports."""
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    service = Spark(DistConfig(data=layer.options('apache-spark')))
    service.configure()
    service.start()
    service.open_ports()
    set_state('spark.started')
Пример #6
0
def install_spark(hadoop):  # pylint: disable=w0613
    """Install Spark with its config files and the bundled demo."""
    layout = DistConfig(data=layer.options("apache-spark"))
    service = Spark(layout)
    # Skip the install entirely when resources cannot be verified.
    if not service.verify_resources():
        return
    hookenv.status_set("maintenance", "Installing Apache Spark")
    service.install()
    service.setup_spark_config()
    service.install_demo()
    set_state("spark.installed")
Пример #7
0
def start_spark(hadoop):  # pylint: disable=w0613
    """Configure and launch Spark, then open its service ports."""
    hookenv.status_set("maintenance", "Setting up Apache Spark")
    service = Spark(DistConfig(data=layer.options("apache-spark")))
    service.configure()
    service.start()
    service.open_ports()
    set_state("spark.started")
Пример #8
0
def install_spark(hadoop):
    """Create dirs, pull packages, and install Spark plus config and demo."""
    layout = get_dist_config()
    service = Spark(layout)
    # Nothing to do unless the resources verify.
    if not service.verify_resources():
        return
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    layout.add_dirs()
    layout.add_packages()
    service.install()
    service.setup_spark_config()
    service.install_demo()
    set_state('spark.installed')
Пример #9
0
def start_spark(hadoop):
    """Bring Spark up, open its ports, and report a ready status."""
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    service = Spark(get_dist_config())

    service.configure()
    service.start()
    service.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')
Пример #10
0
def start_spark(*args):
    """Configure, start, and expose Spark, then mark the unit ready."""
    from charms.spark import Spark  # in lib/charms; not available until after bootstrap

    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    service = Spark()
    service.configure()
    service.start()
    service.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')
Пример #11
0
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Reapply charm configuration to Spark and Livy.

    Skips the disruptive stop/configure/start cycle entirely when the
    charm configuration has not changed since the last run.
    """
    config = hookenv.config()
    if not data_changed('configuration', config):
        return

    hookenv.status_set('maintenance',
                       'Configuring Apache Spark and Livy REST server')
    dist = DistConfig(data=layer.options('apache-spark'))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options('livy'))
    livy = Livy(dist)

    # Stop Livy before Spark; restart them in the reverse order.
    livy.stop()
    spark.stop()
    spark.configure()
    # Reuse the config fetched above instead of a redundant second
    # hookenv.config() call.
    mode = config['spark_execution_mode']
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set('active', 'Ready')
Пример #12
0
def stop_spark():
    """Stop the Livy REST server, then Spark, resetting their states."""
    hookenv.status_set('maintenance', 'Stopping Livy REST server')
    livy_service = Livy(DistConfig(data=layer.options('livy')))
    livy_service.close_ports()
    livy_service.stop()
    remove_state('livy.started')

    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    spark_service = Spark(DistConfig(data=layer.options('apache-spark')))
    spark_service.close_ports()
    spark_service.stop()
    remove_state('spark.started')
Пример #13
0
def reconfigure_spark(hadoop):  # pylint: disable=w0613
    """Reapply charm configuration to Spark and Livy.

    Returns early without touching the services when the charm
    configuration is unchanged since the last invocation.
    """
    config = hookenv.config()
    if not data_changed("configuration", config):
        return

    hookenv.status_set("maintenance", "Configuring Apache Spark and Livy REST server")
    dist = DistConfig(data=layer.options("apache-spark"))
    spark = Spark(dist)
    dist = DistConfig(data=layer.options("livy"))
    livy = Livy(dist)

    # Stop Livy before Spark; restart them in the reverse order.
    livy.stop()
    spark.stop()
    spark.configure()
    # Reuse the config fetched above instead of a redundant second
    # hookenv.config() call.
    mode = config["spark_execution_mode"]
    livy.configure(mode)
    spark.start()
    livy.start()
    hookenv.status_set("active", "Ready")
Пример #14
0
def stop_spark():
    """Stop Spark, close its ports, and drop the started state."""
    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    service = Spark(get_dist_config())
    service.close_ports()
    service.stop()
    remove_state('spark.started')