def configure_flume(hdfs):
    # Configure and (re)start Flume once HDFS is available, then mark the
    # charm as started and ready to accept agent connections.
    hookenv.status_set('maintenance', 'Setting up Flume')
    flume = Flume(get_dist_config())
    flume.configure_flume()
    flume.restart()
    set_state('flumehdfs.started')
    hookenv.status_set('active', 'Ready (Accepting agent connections)')

def stop_hive_wait_hdfs(db):
    # HDFS went away: stop Hive, close its ports, and block until the
    # Hadoop relation is re-established.
    hookenv.status_set('maintenance', 'Stopping Apache Hive')
    hive = Hive(get_dist_config())
    hive.stop()
    hive.close_ports()
    remove_state('hive.started')
    hookenv.status_set('blocked', 'Waiting for Hadoop connection')

def start_spark(hadoop):
    # Configure, start, and expose Apache Spark once Hadoop is ready.
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    spark = Spark(get_dist_config())
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')
    hookenv.status_set('active', 'Ready')

def start_hive(hdfs, database):
    # Configure Hive against the related metastore database, open its
    # ports, and start the service.
    hookenv.status_set('maintenance', 'Setting up Apache Hive')
    hive = Hive(get_dist_config())
    hive.setup_hive_config()
    hive.configure_hive(database)
    hive.open_ports()
    hive.start()
    set_state('hive.started')
    hookenv.status_set('active', 'Ready')

def monitor_config_changes(hdfs):
    # Re-render the Flume configuration and restart the service, but only
    # when the charm config has actually changed since the last run.
    hookenv.status_set('active', 'Ready (Accepting agent connections)')
    config = hookenv.config()
    if not data_changed('configuration', config):
        return

    flume = Flume(get_dist_config())
    flume.configure_flume()
    flume.restart()
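
# A minimal sketch (not from the charm itself) of how the data_changed gate
# used above behaves: charms.reactive.helpers.data_changed hashes the given
# value, compares it with the hash stored under the key on the previous call,
# and stores the new hash, so it returns True only when the value changed
# (including the first call, when nothing is stored yet). The key and the
# example config dict below are illustrative assumptions.
from charms.reactive.helpers import data_changed

example_config = {'roll_interval': 30}
data_changed('configuration', example_config)   # True on the first call
data_changed('configuration', example_config)   # False while the value is unchanged
example_config['roll_interval'] = 60
data_changed('configuration', example_config)   # True again after a change
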
def install_spark(hadoop):
    # Install Apache Spark (plus its demo job) once the charm resources
    # have been verified.
    dist = get_dist_config()
    spark = Spark(dist)
    if spark.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Spark')
        dist.add_dirs()
        dist.add_packages()
        spark.install()
        spark.setup_spark_config()
        spark.install_demo()
        set_state('spark.installed')

def install_hive(hadoop):
    hookenv.status_set('maintenance', 'Installing base resources')

    # Hive cannot handle '-' in the metastore db name, and MySQL uses the
    # Juju service name to name the db, so refuse to proceed in that case.
    if "-" in hookenv.service_name():
        hookenv.status_set(
            'blocked',
            'Service name should not contain -. Redeploy with a different name.'
        )
        return False

    hive = Hive(get_dist_config())
    if hive.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Hive')
        hive.install()
        set_state('hive.installed')
        return True

    return False

def set_installed(client):
    # Tell the related client that Hadoop is installed, including which
    # version the distribution config reports.
    dist = get_dist_config()
    client.set_installed(dist.hadoop_version)

def missing_mysql():
    # The database relation is missing; block until one is provided.
    hive = Hive(get_dist_config())
    hive.new_db_connection()
    hookenv.status_set('blocked', 'Waiting for relation to database')

def client_joined(hive):
    # A client related to Hive: hand it the Hive port and mark the
    # relation as ready.
    dist = get_dist_config()
    port = dist.port('hive')
    hive.send_port(port)
    hive.set_ready()
    set_state('client.configured')

def stop_spark():
    # Stop Apache Spark and close its ports.
    hookenv.status_set('maintenance', 'Stopping Apache Spark')
    spark = Spark(get_dist_config())
    spark.close_ports()
    spark.stop()
    remove_state('spark.started')

def client_present(client):
    # Hand the related client the Livy REST port and signal that Spark
    # has started.
    dist = get_dist_config()
    rest_port = dist.port('livy')
    client.send_rest_port(rest_port)
    client.set_spark_started()

def install_flume(*args):
    # Install Flume once its charm resources have been verified.
    flume = Flume(get_dist_config())
    if flume.verify_resources():
        hookenv.status_set('maintenance', 'Installing Flume')
        flume.install()
        set_state('flumehdfs.installed')
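
# The handlers in this listing come from reactive Juju charms, where each
# function is registered against charm states with decorators from
# charms.reactive; those decorators were stripped from the snippets above.
# Below is a minimal sketch of how the Flume handlers might be wired. The
# relation state name 'hdfs.ready' is an illustrative assumption; the
# 'flumehdfs.*' states come from the snippets themselves.
from charms.reactive import when, when_not


@when_not('flumehdfs.installed')
def install_flume(*args):
    # runs until the charm resources are verified and Flume is installed
    ...


@when('flumehdfs.installed', 'hdfs.ready')   # 'hdfs.ready' is assumed
@when_not('flumehdfs.started')
def configure_flume(hdfs):
    # runs once per HDFS (re)connection to configure and restart Flume
    ...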