Example #1
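# Summary (from the code below): in maintenance mode, stop Spark and, if
# configured, trigger an immediate upgrade; otherwise reconfigure Spark,
# enabling or disabling YARN mode to match the execution-mode config.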
def reconfigure_spark():
    config = hookenv.config()
    maintenance = config['maintenance_mode']
    if maintenance:
        remove_state('not.upgrading')
        spark = Spark(get_dist_config())
        report_status(spark)
        spark.stop()
        current_version = spark.get_current_version()
        if config['upgrade_immediately'] and config['spark_version'] != current_version:
            upgrade_spark()
        return
    else:
        set_state('not.upgrading')

    mode = hookenv.config()['spark_execution_mode']
    hookenv.status_set('maintenance', 'Configuring Apache Spark')
    spark = Spark(get_dist_config())
    spark.stop()
    if is_state('hadoop.ready') and mode.startswith('yarn') and (not is_state('yarn.configured')):
        # was in a mode other than yarn, going to yarn
        hookenv.status_set('maintenance', 'Setting up Apache Spark for YARN')
        spark.configure_yarn_mode()
        set_state('yarn.configured')

    if is_state('hadoop.ready') and (not mode.startswith('yarn')) and is_state('yarn.configured'):
        # was in a yarn mode and going to another mode
        hookenv.status_set('maintenance', 'Disconnecting Apache Spark from YARN')
        spark.disable_yarn_mode()
        remove_state('yarn.configured')

    spark.configure()
    spark.start()
    report_status(spark)
Example #2
def install_oozie(hadoop):
    hookenv.status_set('maintenance', 'Installing Apache Oozie')

    oozie = Oozie(get_dist_config())
    if oozie.verify_resources():
        oozie.install()
        set_state('oozie.installed')
Example #3
def reconfigure_hive(database):
    hookenv.status_set('maintenance', 'Configuring Hive')
    hive = Hive(get_dist_config())
    hive.stop()
    hive.configure_hive(database)
    hive.start()
    hookenv.status_set('active', 'Ready')
Example #4
def configure_zookeeper(zks):
    hookenv.status_set('maintenance', 'Configuring Hue for Zookeeper')
    hue = Hue(get_dist_config())
    hue.configure_zookeeper(zks.zookeepers())
    hue.update_apps()
    hue.restart()
    set_state('zookeeper.configured')
Example #5
def stop_hive_wait_db(hdfs):
    hookenv.status_set('maintenance', 'Stopping Apache Hive')
    hive = Hive(get_dist_config())
    hive.stop()
    hive.close_ports()
    remove_state('hive.started')
    hookenv.status_set('blocked', 'Waiting for database connection')
Example #7
def install_sqoop(db):
    hookenv.status_set('maintenance', 'Installing Sqoop Server')
    dist = get_dist_config()
    sqoop = Sqoop(dist)
    sqoop.install_sqoop()
    sqoop.open_ports()
    sqoop.start()
    set_state('sqoop.installed')
Example #8
def start_spark():
    hookenv.status_set('maintenance', 'Setting up Apache Spark')
    spark = Spark(get_dist_config())
    spark.configure()
    spark.start()
    spark.open_ports()
    set_state('spark.started')
    report_status(spark)
Example #9
def configure_oozie(oozie):
    oozie_host = oozie.get_private_ip()
    oozie_port = oozie.get_port()
    hue = Hue(get_dist_config())
    hue.configure_oozie(oozie_host, oozie_port)
    hue.update_apps()
    hue.restart()
    set_state('oozie.configured')
Example #12
def disable_yarn():
    hookenv.status_set('maintenance', 'Disconnecting Apache Spark from YARN')
    spark = Spark(get_dist_config())
    spark.stop()
    spark.disable_yarn_mode()
    spark.start()
    remove_state('yarn.configured')
    report_status(spark)
Example #13
def install_oozie(db):
    hookenv.status_set('maintenance', 'Installing Oozie')
    dist = get_dist_config()
    oozie = Oozie(dist)
    oozie.install_oozie()
    oozie.open_ports()
    oozie.start()
    set_state('oozie.installed')
Example #14
def install_sqoop(db):
    hookenv.status_set('maintenance', 'Installing Sqoop Server')
    dist = get_dist_config()
    sqoop = Sqoop(dist)
    sqoop.install_sqoop()
    sqoop.open_ports()
    sqoop.start()
    set_state('sqoop.installed')
Example #16
def disable_zookeepers():
    hookenv.status_set('maintenance', 'Disabling high availability')
    spark = Spark(get_dist_config())
    spark.stop()
    spark.disable_ha()
    spark.configure()
    spark.start()
    remove_state('zookeeper.configured')
    report_status(spark)
Example #17
def stop_datagrid_services():
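    # Stop the InsightEdge datagrid master and/or slave processes if their
    # JVMs are found (via jps markers); the master stop script is given a
    # 10-second TIMEOUT through its environment.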
    dc = get_dist_config()
    ie_home = dc.path('spark')
    if utils.jps("insightedge.marker=master"):
        d = dict(os.environ)
        d["TIMEOUT"] = str(10)
        subprocess.call([ie_home / "sbin" / "stop-datagrid-master.sh"], env=d)
    if utils.jps("insightedge.marker=slave"):
        subprocess.call([ie_home / "sbin" / "stop-datagrid-slave.sh"])
Example #18
def install_spark():
    dist = get_dist_config()
    spark = Spark(dist)
    if spark.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Spark')
        spark.install()
        spark.setup_spark_config()
        spark.install_demo()
        set_state('spark.installed')
Example #19
def configure_spark(spark):
    hookenv.status_set('maintenance', 'Configuring Hue for Spark')
    spark_host = spark.get_private_ip()
    spark_rest_port = spark.get_rest_port()
    hue = Hue(get_dist_config())
    hue.configure_spark(spark_host, spark_rest_port)
    hue.update_apps()
    hue.restart()
    set_state('spark.configured')
Example #21
def configure_hive(hive):
    hookenv.status_set('maintenance', 'Configuring Hue for Hive')
    hive_host = hive.get_private_ip()
    hive_port = hive.get_port()
    hue = Hue(get_dist_config())
    hue.configure_hive(hive_host, hive_port)
    hue.update_apps()
    hue.restart()
    set_state('hive.configured')
Example #22
def start_hive(hdfs, database):
    hookenv.status_set('maintenance', 'Setting up Apache Hive')
    hive = Hive(get_dist_config())
    hive.setup_hive_config()
    hive.configure_hive(database)
    hive.open_ports()
    hive.start()
    set_state('hive.started')
    hookenv.status_set('active', 'Ready')
Example #24
def install_pig():
    pig = Pig(get_dist_config())
    if pig.verify_resources():
        hookenv.status_set('maintenance', 'installing pig')
        hookenv.log('Installing Apache Pig')
        pig.install()
        pig.initial_config()
        set_state('pig.installed')
        hookenv.status_set('waiting', 'waiting to configure pig')
        hookenv.log('Apache Pig is installed and ready to be configured')
Example #25
def configure_hue(hadoop):
    namenodes = hadoop.namenodes()
    resmngmrs = hadoop.resourcemanagers()
    hdfs_port = hadoop.hdfs_port()
    yarn_port = hadoop.yarn_port()
    # yarn_http and yarn_ipcp were undefined in the original snippet; these
    # history-server port accessors are an assumption based on the same
    # hadoop interface.
    yarn_http = hadoop.yarn_hs_http_port()
    yarn_ipcp = hadoop.yarn_hs_ipc_port()
    hookenv.status_set('maintenance', 'Setting up Hue')
    hue = Hue(get_dist_config())
    hue.setup_hue(namenodes, resmngmrs, hdfs_port,
                  yarn_port, yarn_http, yarn_ipcp)
    set_state('hue.configured')
Example #26
def configure_zookeepers(zk):
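    # Only rebuild the HA configuration when the zookeeper set has changed.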
    zks = zk.zookeepers()
    if data_changed('available.zookeepers', zks):
        spark = Spark(get_dist_config())
        hookenv.status_set('maintenance', 'Updating Apache Spark HA')
        spark.stop()
        spark.configure_ha(zks)
        spark.configure()
        spark.start()
        set_state('zookeeper.configured')
        report_status(spark)
Example #28
def update_peers_config(nodes):
    nodes.sort()
    if data_changed('available.peers', nodes):
        spark = Spark(get_dist_config())
        # We need to reconfigure spark only if the master changes or if we
        # are in HA mode and a new potential master is added
        if spark.update_peers(nodes):
            hookenv.status_set('maintenance', 'Updating Apache Spark config')
            spark.stop()
            spark.configure()
            spark.start()
            report_status(spark)
Example #30
def install_hive(hadoop):
    # Hive cannot handle - in the metastore db name and
    # mysql uses the service name to name the db
    if "-" in hookenv.service_name():
        hookenv.status_set('blocked', 'Service name should not contain -. '
                                      'Redeploy with a different name.')
        return

    hive = Hive(get_dist_config())
    if hive.verify_resources():
        hookenv.status_set('maintenance', 'Installing Apache Hive')
        hive.install()
        set_state('hive.installed')
Example #31
def install_spark():
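    # Unlike Example #18, resource failures surface here as ResourceError
    # and put the unit into a blocked state.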
    dist = get_dist_config()
    spark = Spark(dist)
    hookenv.status_set('maintenance', 'Installing Apache Spark')
    try:
        spark.install()
    except ResourceError as e:
        hookenv.status_set('blocked', str(e))
        return
    spark.setup_spark_config()
    spark.install_demo()
    set_state('spark.installed')
    set_state('not.upgrading')
Example #33
def restart_services():
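    # Restart the datagrid when the Spark master URL changes; a unit runs
    # the slave role when it is not the master or when it is the only unit.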
    dc = get_dist_config()
    spark = Spark(dc)
    peers = RelationBase.from_state('sparkpeers.joined')
    is_scaled = peers and len(peers.get_nodes()) > 0
    is_master = spark.is_master()
    is_slave = not is_master or not is_scaled
    master_url = spark.get_master()
    master_ip = spark.get_master_ip()
    if data_changed('insightedge.master_url', master_url):
        stop_datagrid_services()
        start_datagrid_services(master_url, master_ip, is_master, is_slave)
    set_state('insightedge.ready')
    hookenv.status_set('active', 'ready')
Example #34
def switch_to_yarn(hadoop):
    '''
    In case you first change the config and then connect the plugin.
    '''
    mode = hookenv.config()['spark_execution_mode']
    if mode.startswith('yarn'):
        spark = Spark(get_dist_config())
        hookenv.status_set('maintenance', 'Setting up Apache Spark for YARN')
        spark.stop()
        spark.configure_yarn_mode()
        set_state('yarn.configured')
        spark.configure()
        spark.start()
        report_status(spark)
Example #35
def upgrade_spark():
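    # Action-style handler returning (success, message). The 'not.upgrading'
    # state is only cleared while in maintenance mode (see Example #1), so
    # its presence means the charm is not ready to upgrade.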
    if is_state('not.upgrading'):
        return (False, "Please enter maintenance mode before triggering the upgrade.")

    version = hookenv.config()['spark_version']
    hookenv.status_set('maintenance', 'Upgrading to {}'.format(version))
    hookenv.log("Upgrading to {}".format(version))
    try:
        spark = Spark(get_dist_config())
        spark.switch_version(version)
    except ResourceError:
        return (False, "Download failed")
    hookenv.status_set('maintenance', 'Upgrade complete. You can exit maintenance mode')
    return (True, "ok")
Example #37
def configure_insightedge_spark():
    hookenv.status_set('maintenance', 'configuring insightedge')
    dc = get_dist_config()
    destination = dc.path('spark')
    spark = Spark(dc)
    with host.chdir(destination):
        insightedge_jars = subprocess.check_output(
            ['bash', '-c', '. {}; get_libs ,'.format(
                destination / 'sbin' / 'common-insightedge.sh')],
            env={'INSIGHTEDGE_HOME': destination},
        ).decode('utf8')
    spark.register_classpaths(insightedge_jars.split(','))
    set_state('insightedge-spark.configured')
Example #38
def start_datagrid_services(master_url, master_ip, is_master, is_slave):
    # TODO:
    #   * some of the below settings should be exposed as charm config
    dc = get_dist_config()
    ie_home = dc.path('spark')
    if is_master:
        subprocess.call([
            ie_home / "sbin" / "start-datagrid-master.sh", "--master",
            master_ip, "--size", "1G"
        ])
    if is_slave:
        subprocess.call([
            ie_home / "sbin" / "start-datagrid-slave.sh", "--master",
            master_url, "--locator", "{}:4174".format(master_ip), "--group",
            "insightedge", "--name", "insightedge-space", "--topology", "2,0",
            "--size", "1G", "--instances", "id=1;id=2"
        ])
Example #39
def install_spark(hadoop=None):
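    # Assemble the hosts map from leadership data plus any ready YARN and
    # HDFS relations before configuring Spark.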
    spark_master_host = leadership.leader_get('master-fqdn')
    hosts = {
        'spark-master': spark_master_host,
    }

    if is_state('hadoop.yarn.ready'):
        rms = hadoop.resourcemanagers()
        hosts['resourcemanager'] = rms[0]

    if is_state('hadoop.hdfs.ready'):
        nns = hadoop.namenodes()
        hosts['namenode'] = nns[0]

    dist = get_dist_config()
    spark = Spark(dist)
    spark.configure(hosts)
Example #40
def configure_pig():
    pig = Pig(get_dist_config())
    hadoop_ready = is_state('hadoop.ready')
    if hadoop_ready:
        hookenv.status_set('maintenance', 'configuring pig (mapreduce)')
        hookenv.log('YARN is ready, configuring Apache Pig in MapReduce mode')
        pig.configure_yarn()
        remove_state('pig.configured.local')
        set_state('pig.configured.yarn')
        hookenv.status_set('active', 'ready (mapreduce)')
        hookenv.log('Apache Pig is ready in MapReduce mode')
    else:
        hookenv.status_set('maintenance', 'configuring pig (local)')
        hookenv.log('YARN is not ready, configuring Pig in local mode')
        pig.configure_local()
        remove_state('pig.configured.yarn')
        set_state('pig.configured.local')
        hookenv.status_set('active', 'ready (local)')
        hookenv.log('Apache Pig is ready in local mode')
Example #41
def reconfigure_spark():
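    # Same YARN-mode toggling as Example #1, without the maintenance and
    # upgrade handling.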
    mode = hookenv.config()['spark_execution_mode']
    hookenv.status_set('maintenance', 'Configuring Apache Spark')
    spark = Spark(get_dist_config())
    spark.stop()
    if is_state('hadoop.ready') and mode.startswith('yarn') and (not is_state('yarn.configured')):
        # was in a mode other than yarn, going to yarn
        hookenv.status_set('maintenance', 'Setting up Apache Spark for YARN')
        spark.configure_yarn_mode()
        set_state('yarn.configured')

    if is_state('hadoop.ready') and (not mode.startswith('yarn')) and is_state('yarn.configured'):
        # was in a yarn mode and going to another mode
        hookenv.status_set('maintenance', 'Disconnecting Apache Spark from YARN')
        spark.disable_yarn_mode()
        remove_state('yarn.configured')

    spark.configure()
    spark.start()
    report_status(spark)
Example #42
def depart_spark():
    hookenv.status_set('maintenance', 'Disconnecting Spark from Hue')
    remove_state('spark.configured')
    hue = Hue(get_dist_config())
    hue.update_apps()
    hue.restart()
Example #43
def depart_zookeeper():
    hookenv.status_set('maintenance', 'Disconnecting Zookeeper from Hue')
    remove_state('zookeeper.configured')
    hue = Hue(get_dist_config())
    hue.update_apps()
    hue.restart()
Example #44
def restart_hue():
    # Can't seem to mix @when_file_changed and @when('hue.started')
    hue = Hue(get_dist_config())
    hue.restart()
Example #45
def check_relations(*args):
    hue = Hue(get_dist_config())
    hue.check_relations()
Example #46
def stop_hue():
    hue = Hue(get_dist_config())
    hue.stop()
    remove_state('hue.started')
    hookenv.status_set('blocked', 'Waiting for Hadoop connection')
Example #47
def start_hue(hadoop):
    hookenv.status_set('maintenance', 'Setting up Hue')
    hue = Hue(get_dist_config())
    hue.open_ports()
    hue.start()
    set_state('hue.started')
Example #48
def install_hue(hadoop):
    hue = Hue(get_dist_config())
    if hue.verify_resources():
        hookenv.status_set('maintenance', 'Installing Hue')
        hue.install()
        set_state('hue.installed')