Code example #1 (score: 0)
File: kafka.py — Project: aljoscha/yoka
def configure():
    """Render Kafka's server.properties from its mustache template."""
    ctx = conf.copy()
    ctx['id'] = get_slave_id(env.host_string)
    zk_hosts = env.hostnames[0:conf['num_instances']]
    ctx['zookeeper_server'] = [{'name': host} for host in zk_hosts]
    target = "%s/%s" % (PATH, "config")
    process_template("kafka", "server.properties.mustache", ctx, target)
Code example #2 (score: 0)
File: storm.py — Project: mxm/yoka
def configure():
    """Render storm.yaml with zookeeper quorum, master and supervisor slots."""
    ctx = conf.copy()
    zk_hosts = env.hostnames[0:conf['num_zookeeper_instances']]
    ctx['zookeeper'] = [{'server': host} for host in zk_hosts]
    ctx['master'] = env.master
    # supervisor worker ports start at 6700 by Storm convention
    ctx['supervisor_slots'] = [
        {'slot': 6700 + offset}
        for offset in range(conf['num_supervisor_slots'])
    ]
    target = "%s/%s" % (PATH, "conf")
    process_template("storm", "storm.yaml.mustache", ctx, target)
Code example #3 (score: 0)
File: zookeeper.py — Project: aljoscha/yoka
def configure():
    """Render zoo.cfg and recreate an empty ZooKeeper data directory."""
    ctx = conf.copy()
    members = env.hostnames[0:conf['num_instances']]
    ctx['server'] = [
        {'i': idx, 'name': host} for idx, host in enumerate(members)
    ]
    target = "%s/%s" % (PATH, "conf")
    process_template("zookeeper", "zoo.cfg.mustache", ctx, target)
    # wipe any previous state so the ensemble starts clean
    run("rm -rf %s" % conf['data_dir'])
    run("mkdir -p %s" % conf['data_dir'])
Code example #4 (score: 0)
def configure():
    """Write the Kafka broker config (server.properties) for this host."""
    settings = conf.copy()
    settings['id'] = get_slave_id(env.host_string)
    settings['zookeeper_server'] = [
        {'name': hostname}
        for hostname in env.hostnames[0:conf['num_instances']]
    ]
    out_dir = "%s/%s" % (PATH, "config")
    process_template("kafka", "server.properties.mustache", settings, out_dir)
Code example #5 (score: 0)
def configure():
    """Write zoo.cfg for the ensemble and reset the data directory."""
    settings = conf.copy()
    ensemble = enumerate(env.hostnames[0:conf['num_instances']])
    settings['server'] = [
        {'i': position, 'name': hostname} for position, hostname in ensemble
    ]
    out_dir = "%s/%s" % (PATH, "conf")
    process_template("zookeeper", "zoo.cfg.mustache", settings, out_dir)
    # recreate the data dir from scratch
    run("rm -rf %s" % conf['data_dir'])
    run("mkdir -p %s" % conf['data_dir'])
Code example #6 (score: 0)
File: storm.py — Project: mxm/yoka
def configure():
    """Generate storm.yaml from the mustache template."""
    settings = conf.copy()
    settings['zookeeper'] = [
        {'server': hostname}
        for hostname in env.hostnames[0:conf['num_zookeeper_instances']]
    ]
    settings['master'] = env.master
    # one entry per worker slot, ports counted up from 6700
    slot_count = conf['num_supervisor_slots']
    settings['supervisor_slots'] = [{'slot': 6700 + n} for n in range(slot_count)]
    out_dir = "%s/%s" % (PATH, "conf")
    process_template("storm", "storm.yaml.mustache", settings, out_dir)
Code example #7 (score: 0)
File: flink.py — Project: mxm/yoka
def configure():
    """Render flink-conf.yaml and the slaves file, then extend PATH."""
    ctx = conf.copy()
    ctx['java_home'] = find_java_home()
    ctx['master'] = env.master
    # resolve the Hadoop config dir from the remote shell environment
    ctx['hadoop_conf_path'] = run("echo $HADOOP_CONF_DIR")
    target = get_flink_dist_path() + "/conf"
    process_template("flink", "flink-conf.yaml.mustache", ctx, target)
    joined = '\n'.join(env.slaves)
    process_template("flink", "slaves.mustache", {'slaves': joined}, target)
    # make flink binaries reachable from login shells
    run("echo export PATH=$PATH:'%s'/bin >> %s" % (PATH, "~/.profile"))
Code example #8 (score: 0)
File: hadoop.py — Project: ktzoumas/flink-perf-new
def configure():
    """Render Hadoop config (HDFS, YARN, MRv2) and format the namenode."""
    set_java_home("%s/etc/hadoop/hadoop-env.sh" % conf['path'])
    # HDFS settings
    ctx = conf.copy()
    ctx['master'] = env.master
    ctx['namenode_path'] = "%s/%s" % (conf['data_path'], namenode_dir)
    ctx['datanode_path'] = "%s/%s" % (conf['data_path'], datanode_dir)
    target = conf['path'] + "/etc/hadoop/"
    process_template("hadoop", "hdfs-site.xml.mustache", ctx, target)
    ctx['slaves'] = '\n'.join(env.slaves)
    process_template("hadoop", "slaves.mustache", ctx, target)
    format_hdfs_master()
    # YARN settings
    process_template("hadoop", "core-site.xml.mustache", ctx, target)
    process_template("hadoop", "yarn-site.xml.mustache", ctx, target)
    # MapReduce v2 settings
    process_template("hadoop", "mapred-site.xml.mustache", ctx, target)
Code example #9 (score: 0)
File: hadoop.py — Project: aljoscha/yoka
def configure():
    """Render Hadoop config files and reset HDFS to a consistent state."""
    set_java_home("%s/etc/hadoop/hadoop-env.sh" % PATH)
    # HDFS settings
    ctx = conf.copy()
    ctx['master'] = env.master
    ctx['namenode_path'] = "%s/%s" % (conf['data_path'], namenode_dir)
    ctx['datanode_path'] = "%s/%s" % (conf['data_path'], datanode_dir)
    target = PATH + "/etc/hadoop/"
    process_template("hadoop", "hdfs-site.xml.mustache", ctx, target)
    ctx['slaves'] = '\n'.join(env.slaves)
    process_template("hadoop", "slaves.mustache", ctx, target)
    # wipe all data so every run starts from a known-clean HDFS
    execute(format_hdfs_master)
    execute(delete_data_slaves)
    # YARN settings
    process_template("hadoop", "core-site.xml.mustache", ctx, target)
    process_template("hadoop", "yarn-site.xml.mustache", ctx, target)
    # MapReduce v2 settings
    process_template("hadoop", "mapred-site.xml.mustache", ctx, target)
Code example #10 (score: 0)
File: tez.py — Project: aljoscha/yoka
def configure():
    """Build Tez, publish its tarball to HDFS, and set up server + client.

    Side effects: runs a maven build under PATH, writes tez-site.xml,
    recreates the client install dir, and appends TEZ_* exports to
    ~/.profile on the remote host.
    """
    # build tez (maven output is noisy, so silence it)
    with cd(PATH):
        run("mvn clean package -DskipTests=true -Dmaven.javadoc.skip=true > /dev/null")
    # publish the full tarball to HDFS so YARN containers can localize it
    mkdir_hdfs("/tez")
    hdfs_path = "/tez/tarball.tar.gz"
    copy_to_hdfs(get_tez_tarball_path("tez-*SNAPSHOT.tar.gz"), hdfs_path)
    # configure tez
    destination = PATH
    process_template(module="tez",
                     template="tez-site.xml.mustache",
                     context={'hdfs_path': hdfs_path},
                     destination=destination)
    # configure the client from the minimal tarball
    run("rm -rf '%s'" % conf['path_client'])
    tarball_location = get_tez_tarball_path("tez*-minimal.tar.gz")
    run("mkdir -p %s" % conf['path_client'])
    run("tar -xzf %s -C %s" % (tarball_location, conf['path_client']))
    # FIX: original interpolated undefined name `path` (NameError) — use PATH,
    # matching the sibling tez configure() in this project
    run("echo 'export TEZ_CONF_DIR=%s' >> ~/.profile" % PATH)
    # FIX: TEZ_JARS is referenced by the HADOOP_CLASSPATH export below but was
    # never set here; export it as the sibling version does
    run("echo 'export TEZ_JARS=%s' >> ~/.profile" % conf['path_client'])
    run("echo 'export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:${TEZ_CONF_DIR}:${TEZ_JARS}/*:${TEZ_JARS}/lib/*' >> ~/.profile")
Code example #11 (score: 0)
File: hadoop.py — Project: mxm/yoka
def configure():
    """Write all Hadoop config files and reformat HDFS for a clean start."""
    set_java_home("%s/etc/hadoop/hadoop-env.sh" % PATH)
    # HDFS configuration
    settings = conf.copy()
    settings['master'] = env.master
    settings['namenode_path'] = "%s/%s" % (conf['data_path'], namenode_dir)
    settings['datanode_path'] = "%s/%s" % (conf['data_path'], datanode_dir)
    out_dir = PATH + "/etc/hadoop/"
    process_template("hadoop", "hdfs-site.xml.mustache", settings, out_dir)
    settings['slaves'] = '\n'.join(env.slaves)
    process_template("hadoop", "slaves.mustache", settings, out_dir)
    # delete all data to guarantee a consistent state
    # TODO check if we really need to format (in case of resume)
    execute(format_hdfs_master)
    execute(delete_data_slaves)
    # YARN configuration
    process_template("hadoop", "core-site.xml.mustache", settings, out_dir)
    process_template("hadoop", "yarn-site.xml.mustache", settings, out_dir)
    # MapReduce v2 configuration
    process_template("hadoop", "mapred-site.xml.mustache", settings, out_dir)
Code example #12 (score: 0)
def configure():
    """Build Tez, stage its tarball in HDFS, and install the client.

    Also appends TEZ_CONF_DIR / TEZ_JARS / HADOOP_CLASSPATH exports
    to the remote ~/.profile.
    """
    # compile tez, discarding maven's verbose output
    with cd(PATH):
        run("mvn clean package -DskipTests=true -Dmaven.javadoc.skip=true > /dev/null")
    # stage the full tarball in HDFS for YARN localization
    mkdir_hdfs("/tez")
    hdfs_path = "/tez/tarball.tar.gz"
    copy_to_hdfs(get_tez_tarball_path("tez-*SNAPSHOT.tar.gz"), hdfs_path)
    # render the tez server configuration
    process_template(module="tez",
                     template="tez-site.xml.mustache",
                     context={'hdfs_path' : hdfs_path},
                     destination=PATH)
    # unpack the minimal tarball as the client installation
    run("rm -rf '%s'" % conf['path_client'])
    client_tarball = get_tez_tarball_path("tez*-minimal.tar.gz")
    run("mkdir -p %s" % conf['path_client'])
    run("tar -xzf %s -C %s" % (client_tarball, conf['path_client']))
    # persist the environment variables tez clients need
    run("echo 'export TEZ_CONF_DIR=%s' >> ~/.profile" % PATH)
    run("echo 'export TEZ_JARS=%s' >> ~/.profile" % conf['path_client'])
    run("echo 'export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:${TEZ_CONF_DIR}:${TEZ_JARS}/*:${TEZ_JARS}/lib/*' >> ~/.profile")