def azkaban_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    ssh_client.exec_shell_command(
        "cd /home/{0}/azkaban/azkaban-exec-server-3.15.0;./bin/azkaban-exec-start.sh"
        .format(USER))
    ssh_client.exec_shell_command(
        "cd /home/{0}/azkaban/azkaban-web-server-3.15.0;./bin/azkaban-web-start.sh"
        .format(USER))
    ssh_client.release()
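# ``LinuxClient`` is not defined in this excerpt. Below is a minimal sketch of
# the interface these helpers rely on, assuming it wraps paramiko; the mapping
# of exec_shell_command/transport_file/transport_dir/release onto paramiko's
# SSH and SFTP calls is an assumption, not the original implementation.
import os

import paramiko


class LinuxClient(object):
    """Thin SSH/SFTP wrapper (sketch)."""

    def __init__(self, ip, username, password, port=22):
        self._ssh = paramiko.SSHClient()
        self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self._ssh.connect(ip, port=port, username=username, password=password)

    def exec_shell_command(self, cmd):
        # Run a command and block until it exits.
        _stdin, stdout, stderr = self._ssh.exec_command(cmd)
        return stdout.read(), stderr.read()

    def transport_file(self, local_path, remote_path):
        sftp = self._ssh.open_sftp()
        try:
            sftp.put(local_path, remote_path)
        finally:
            sftp.close()

    def transport_dir(self, local_dir, remote_dir):
        # paramiko's SFTP has no recursive put, so walk the tree by hand.
        sftp = self._ssh.open_sftp()
        try:
            for root, _dirs, files in os.walk(local_dir):
                rel = os.path.relpath(root, local_dir)
                remote_root = remote_dir if rel == "." else remote_dir + "/" + rel
                try:
                    sftp.mkdir(remote_root)
                except IOError:
                    pass  # directory already exists
                for name in files:
                    sftp.put(os.path.join(root, name), remote_root + "/" + name)
        finally:
            sftp.close()

    def release(self):
        self._ssh.close()
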
def hadoop_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    # NOTE: despite the name, this second client also authenticates as USER;
    # start-secure-dns.sh is normally run as root.
    root_ssh_client = LinuxClient(info["ip"],
                                  username=USER,
                                  password=PASSWORD,
                                  port=info["port"])

    cmd_set = (
        # NB: "namenode -format" wipes existing HDFS metadata and prompts for
        # confirmation if the namenode was formatted before.
        "cd /home/{0}/hadoop-2.7.3/bin/;./hdfs namenode -format".format(USER),
        "cd /home/{0}/hadoop-2.7.3/sbin/;./start-dfs.sh".format(USER),
        "cd /home/{0}/hadoop-2.7.3/sbin/;./start-yarn.sh".format(USER),
    )
    # Run each command in order; a bare map() is lazy on Python 3 and would
    # never actually execute them.
    for cmd in cmd_set:
        ssh_client.exec_shell_command(cmd)
    root_ssh_client.exec_shell_command(
        "cd /home/{0}/hadoop-2.7.3/sbin/;./start-secure-dns.sh".format(USER))
    ssh_client.release()
    root_ssh_client.release()
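# Usage sketch: each helper takes an ``info`` dict describing one host; the
# keys read by these functions are "ip", "port", "hostname", and (in
# server_init below) "password". The values here are placeholders:
#
#     hadoop_start({"ip": "192.168.1.10", "port": 22,
#                   "hostname": "namenode-1", "password": "..."})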

def hadoopy_install(info):
    host_name = info["hostname"]
    ssh_client = LinuxClient(info["ip"], username=USER, password=PASSWORD, port=info["port"])
    no_secret(ssh_client)
    ssh_client.transport_dir(os.path.join(RESOURCES_DIR, ".keys/"), "/home/{0}/.keys".format(USER))
    global namenode, datanodes, hmaster, regionservers, spark_server, metastores
    global coordinator, workers, redis_server, zookeepers, azkaban_server, ldap_server
    if host_name == namenode or host_name in datanodes:
        hadoop(ssh_client)
    if host_name == hmaster or host_name in regionservers:
        hbase(ssh_client)
    if host_name == namenode or host_name in spark_server or host_name in metastores:
        hive(ssh_client)
        spark(ssh_client)
    if host_name == coordinator or host_name in workers:
        set_presto_config(coordinator, host_name == coordinator)
        set_presto_node("master" if host_name == coordinator else "worker-{0}".format(workers.index(host_name) + 1))
        presto(ssh_client)
    if host_name == redis_server:
        redis(ssh_client)
    if host_name in zookeepers:
        zookeeper(ssh_client, zookeepers.index(host_name) + 1)
    if host_name == azkaban_server:
        azkaban(ssh_client)
    if ldap_server == host_name:
        kadmin_ldap_server(ssh_client, ldap_server)
    ssh_client.release()
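# The role variables consulted above (namenode, datanodes, hmaster, ...) are
# module-level globals defined elsewhere. A sketch of how they might be
# populated from a cluster description file; the JSON layout and the helper
# name load_roles are assumptions:
import json

def load_roles(path):
    global namenode, datanodes, hmaster, regionservers, coordinator, workers
    global zookeepers, metastores
    with open(path) as f:
        roles = json.load(f)
    namenode = roles["namenode"]        # e.g. "nn-1"
    datanodes = roles["datanodes"]      # e.g. ["dn-1", "dn-2"]
    hmaster = roles["hmaster"]
    regionservers = roles["regionservers"]
    coordinator = roles["coordinator"]
    workers = roles["workers"]
    zookeepers = roles["zookeepers"]
    metastores = roles["metastores"]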
def kadmin_ldap_server_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    ssh_client.exec_shell_command(
        "cd /home/{0}/kadmin_ldap_server;python app.py &".format(USER))
    ssh_client.release()
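# Note: a process launched with a trailing "&" can still be killed by SIGHUP
# when the SSH channel closes, depending on how exec_shell_command sets up the
# session. If the app dies on disconnect, a common hardening (an adjustment,
# not part of the original) is:
#
#     "cd /home/{0}/kadmin_ldap_server;"
#     "nohup python app.py > app.log 2>&1 &".format(USER)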
def zookeeper_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    ssh_client.exec_shell_command(
        "cd /home/{0}/zookeeper-3.4.9/bin;./zkServer.sh start".format(USER))
    ssh_client.release()

def hbase_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    ssh_client.exec_shell_command(
        "cd /home/{0}/hbase-1.2.4/bin;./start-hbase.sh".format(USER))
    ssh_client.release()

def presto_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    ssh_client.exec_shell_command(
        "cd /home/{0}/presto/presto-server-0.170/bin;./launcher start".format(
            USER))
    ssh_client.release()

def hive_start(info):
    ssh_client = LinuxClient(info["ip"],
                             username=USER,
                             password=PASSWORD,
                             port=info["port"])
    # The hive launcher expects "--service" (two dashes) to select a service.
    ssh_client.exec_shell_command(
        "cd /home/{0}/apache-hive-1.2.1-bin;./bin/hive --service metastore &"
        .format(USER))
    ssh_client.release()

def server_init(info):
    host_name = info["hostname"]
    # The username literal is redacted in the source listing (likely the root
    # account, given no_secret(..., is_root=True) below).
    ssh_client = LinuxClient(info["ip"], username="******", password=info["password"],
                             port=info["port"])
    no_secret(ssh_client, is_root=True)
    add_user(ssh_client)
    global etc_hosts, mysql_server, ldap_server, host_dict
    fix_host(ssh_client, host_name, etc_hosts)
    jdk(ssh_client)
    if mysql_server == host_name:
        mysql(ssh_client)
        ssh_client.release()
        ssh_client = LinuxClient(info["ip"], username="******", password=info["password"],
                                 port=info["port"])
        ssh_client.exec_shell_command("echo 'max_allowed_packet=50m' >> /etc/my.cnf")
        ssh_client.release()
        ssh_client = LinuxClient(info["ip"], username="******", password=info["password"],
                                 port=info["port"])
        ssh_client.exec_shell_command("systemctl start mysqld")
        create_database(ssh_client)
    if ldap_server == host_name:
        kdcldap(ssh_client)
        mlogcn_inn(ssh_client, info["ip"])
        pythonldap(ssh_client)
        save_keytab(ssh_client, host_dict)
    ssh_client.transport_file(os.path.join(TMP_DIR, "krb5.conf"), "/etc/krb5.conf")
    ssh_client.release()
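
# End-to-end sketch: one way these helpers might be driven for a whole
# cluster. The function name bootstrap_cluster and the exact ordering are
# assumptions; the order follows the components' dependencies (OS init and
# installs first, then ZooKeeper, then HDFS/YARN, then the services on top).
def bootstrap_cluster(all_hosts):
    for info in all_hosts:
        server_init(info)      # users, /etc/hosts, JDK, MySQL/LDAP roles
        hadoopy_install(info)  # per-role installs
    for info in all_hosts:
        if info["hostname"] in zookeepers:
            zookeeper_start(info)
    for info in all_hosts:
        if info["hostname"] == namenode:
            hadoop_start(info)  # formats HDFS, starts DFS and YARN
    for info in all_hosts:
        host = info["hostname"]
        if host == hmaster:
            hbase_start(info)
        if host in metastores:
            hive_start(info)
        if host == coordinator or host in workers:
            presto_start(info)
        if host == azkaban_server:
            azkaban_start(info)
        if host == ldap_server:
            kadmin_ldap_server_start(info)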