def generate_start_script(args, host, job_name, host_id, instance_id):
  """Render the start.sh script for one zookeeper job instance.

  Resolves the instance's run directory through the supervisor, builds the
  classpath and job arguments, and substitutes them into start.sh.tmpl.
  """
  cluster = args.zookeeper_config.cluster
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "zookeeper", cluster.name, job_name, instance_id=instance_id)
  component_dir = "$package_dir"
  # must include both [dir]/ and [dir]/* as [dir]/* only import all jars under
  # this dir but we also need access the webapps under this dir.
  classpath = "%s/:%s/lib/*:%s/*" % (component_dir, component_dir, component_dir)
  job = args.zookeeper_config.jobs["zookeeper"]
  log_level = deploy_utils.get_service_log_level(args, args.zookeeper_config)
  params = job.get_arguments(args, cluster, args.zookeeper_config.jobs,
      args.zookeeper_config.arguments_dict, job_name, host_id, instance_id)
  script_dict = {
    "artifact": "zookeeper-" + cluster.version,
    "job_name": job_name,
    "jar_dirs": classpath,
    "run_dir": supervisor_client.get_run_dir(),
    "params": params,
  }
  return deploy_utils.create_run_script(
      '%s/start.sh.tmpl' % deploy_utils.get_template_dir(), script_dict)
def generate_metrics_config(args, host, job_name, instance_id=-1):
  """Render hadoop-metrics.properties for an hbase job instance.

  Substitutes job name, reporting period, the supervisor-provided log dir and
  the cluster's ganglia address into hadoop-metrics.properties.tmpl.

  Returns:
    The substituted template text as a string.
  """
  # Keep the lookup even though the value is unused: it raises KeyError
  # early for an unknown job_name.
  job = args.hbase_config.jobs[job_name]
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "hbase", args.hbase_config.cluster.name, job_name,
      instance_id=instance_id)
  # "# " comments out the ganglia lines in the template when no ganglia
  # address is configured for the cluster.
  ganglia_switch = "# "
  if args.hbase_config.cluster.ganglia_address:
    ganglia_switch = ""
  config_dict = {
    "job_name": job_name,
    "period": 10,
    "data_dir": supervisor_client.get_log_dir(),
    "ganglia_address": args.hbase_config.cluster.ganglia_address,
    "ganglia_switch": ganglia_switch,
  }
  local_path = "%s/hadoop-metrics.properties.tmpl" % deploy_utils.get_template_dir()
  # Use a context manager so the template file handle is closed promptly;
  # the original relied on GC finalization to close it.
  with open(local_path, "r") as template_file:
    template = deploy_utils.Template(template_file.read())
  return template.substitute(config_dict)
def generate_cleanup_script(args, job_name):
  """Render the storm cleanup script from cleanup_storm.sh.tmpl.

  The storm local dir to wipe is taken from the generated storm.yaml.
  """
  storm_yaml_dict = args.storm_config.configuration.generated_files["storm.yaml"]
  template_path = "%s/storm/cleanup_storm.sh.tmpl" % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, {
    "job_name": job_name,
    "storm_local_dir": storm_yaml_dict['storm.local.dir'],
  })
def generate_bootstrap_script(args, host, job_name, host_id):
  """Render the zookeeper bootstrap script that writes the myid file.

  The myid file goes in the first available data dir reported by the
  supervisor for this host/job.
  """
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "zookeeper", args.zookeeper_config.cluster.name, job_name)
  first_data_dir = supervisor_client.get_available_data_dirs()[0]
  script_dict = {
    "myid_file": "%s/%s" % (first_data_dir, MYID_FILE),
    "host_id": host_id,
  }
  template_path = "%s/bootstrap_zk.sh.tmpl" % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, script_dict)
def generate_cleanup_script(args, host, job_name, active):
  """Render the hdfs cleanup script, adding -clearZK and the HA status.

  `active` selects whether this node is cleaned up as the active or the
  standby member of the HA pair.
  """
  script_params = generate_run_scripts_params(args, host, job_name)
  script_params['params'] += " -clearZK"
  script_params['ha_status'] = 'active' if active else 'standby'
  return deploy_utils.create_run_script(
      '%s/cleanup_hdfs.sh.tmpl' % deploy_utils.get_template_dir(),
      script_params)
def generate_cleanup_script(args, host, job_name, host_id, instance_id, active):
  """Render the hdfs cleanup script for one job instance.

  Appends -clearZK to the base run-script params and tags the script with
  the node's HA role ('active' or 'standby').
  """
  script_params = generate_run_scripts_params(
      args, host, job_name, host_id, instance_id)
  script_params['params'] += " -clearZK"
  script_params['ha_status'] = 'active' if active else 'standby'
  template_path = '%s/cleanup_hdfs.sh.tmpl' % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, script_params)
def generate_start_script(args, host, job_name):
  """Render the zookeeper start.sh script with inline JVM flags.

  Builds the full java command-line (heap sizing, GC logging, zookeeper
  system properties, optional security flags, main class and config path)
  and substitutes it into start.sh.tmpl.
  """
  cluster = args.zookeeper_config.cluster
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "zookeeper", cluster.name, job_name)
  component_dir = "$package_dir"
  # must include both [dir]/ and [dir]/* as [dir]/* only import all jars under
  # this dir but we also need access the webapps under this dir.
  classpath = "%s/:%s/lib/*:%s/*" % (component_dir, component_dir, component_dir)
  job = args.zookeeper_config.jobs["zookeeper"]
  # Each flag gets exactly one trailing space via the join + final space,
  # matching the concatenated form this replaces.
  jvm_flags = [
    '-Xmx%dm' % job.xmx,
    '-Xms%dm' % job.xms,
    '-Xmn%dm' % job.xmn,
    '-XX:MaxDirectMemorySize=%dm' % job.max_direct_memory,
    '-XX:MaxPermSize=%dm' % job.max_perm_size,
    '-XX:+DisableExplicitGC',
    '-XX:+HeapDumpOnOutOfMemoryError',
    '-XX:HeapDumpPath=$log_dir',
    '-XX:+PrintGCApplicationStoppedTime',
    '-XX:+UseConcMarkSweepGC',
    '-XX:CMSInitiatingOccupancyFraction=80',
    '-XX:+UseMembar',
    '-verbose:gc',
    '-XX:+PrintGCDetails',
    '-XX:+PrintGCDateStamps',
    '-Xloggc:$run_dir/stdout/zk_gc_${start_time}.log',
    '-Djava.net.preferIPv4Stack=true',
    '-Dzookeeper.log.dir=$log_dir',
    '-Dzookeeper.cluster=%s' % cluster.name,
    '-Dzookeeper.tracelog.dir=$log_dir',
  ]
  script_dict = {
    "artifact": "zookeeper-" + cluster.version,
    "job_name": job_name,
    "jar_dirs": classpath,
    "run_dir": supervisor_client.get_run_dir(),
    "params": ' '.join(jvm_flags) + ' ',
  }
  # Config security
  if deploy_utils.is_security_enabled(args):
    script_dict["params"] += ('-Dzookeeper.superUser=zk_admin '
        '-Djava.security.auth.login.config=$run_dir/jaas.conf '
        '-Djava.security.krb5.conf=$run_dir/krb5.conf ')
  script_dict["params"] += ('org.apache.zookeeper.server.quorum.QuorumPeerMain '
      '$run_dir/zookeeper.cfg')
  return deploy_utils.create_run_script(
      '%s/start.sh.tmpl' % deploy_utils.get_template_dir(), script_dict)
def generate_bootstrap_script(args, host, job_name, host_id):
  """Render bootstrap_zk.sh, which writes host_id into the zk myid file."""
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "zookeeper", args.zookeeper_config.cluster.name, job_name)
  # Place myid in the first data dir the supervisor reports as available.
  first_data_dir = supervisor_client.get_available_data_dirs()[0]
  template_path = '%s/bootstrap_zk.sh.tmpl' % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, {
    'myid_file': "%s/%s" % (first_data_dir, MYID_FILE),
    'host_id': host_id,
  })
def generate_bootstrap_script(args, host, job_name, host_id, instance_id):
  """Render bootstrap_zk.sh for one zookeeper instance.

  The myid written is the task id derived from (host_id, instance_id)
  against the configured zookeeper host list, not the raw host_id.
  """
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "zookeeper", args.zookeeper_config.cluster.name, job_name,
      instance_id=instance_id)
  first_data_dir = supervisor_client.get_available_data_dirs()[0]
  zk_hosts = args.zookeeper_config.jobs["zookeeper"].hosts
  task_id = deploy_utils.get_task_id(zk_hosts, host_id, instance_id)
  script_dict = {
    'myid_file': "%s/%s" % (first_data_dir, MYID_FILE),
    'host_id': task_id,
  }
  return deploy_utils.create_run_script(
      '%s/bootstrap_zk.sh.tmpl' % deploy_utils.get_template_dir(),
      script_dict)
def generate_bootstrap_script(args, host, job_name, host_id, instance_id, active):
  """Render bootstrap_hdfs.sh for a zkfc or namenode instance.

  Chooses the bootstrap option by role: active zkfc formats ZK; active
  namenode formats the FS; standby namenode bootstraps from the active.
  """
  script_params = generate_run_scripts_params(
      args, host, job_name, host_id, instance_id)
  script_params['ha_status'] = 'standby'
  extra_option = str()
  if job_name == "zkfc":
    if active:
      extra_option = "-formatZK"
      script_params['ha_status'] = 'active'
  elif job_name == "namenode":
    if active:
      extra_option = "-format -nonInteractive"
    else:
      extra_option = "-bootstrapStandby -skipSharedEditsCheck -nonInteractive"
  script_params['params'] += " %s" % extra_option
  template_path = '%s/bootstrap_hdfs.sh.tmpl' % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, script_params)
def generate_bootstrap_script(args, host, job_name, active):
  """Render bootstrap_hdfs.sh for a zkfc or namenode job.

  An active zkfc gets -formatZK; an active namenode gets -format; a
  standby namenode gets -bootstrapStandby.
  """
  script_params = generate_run_scripts_params(args, host, job_name)
  script_params['ha_status'] = 'standby'
  extra_option = str()
  if job_name == "zkfc":
    if active:
      extra_option = "-formatZK"
      script_params['ha_status'] = 'active'
  elif job_name == "namenode":
    if active:
      extra_option = "-format -nonInteractive"
    else:
      extra_option = "-bootstrapStandby -skipSharedEditsCheck -nonInteractive"
  script_params['params'] += " %s" % extra_option
  template_path = '%s/bootstrap_hdfs.sh.tmpl' % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, script_params)
def generate_metrics_config(args, host, job_name, instance_id=-1):
  """Render hadoop-metrics2.properties for a yarn job instance.

  Substitutes job name, reporting period, the supervisor-provided log dir
  and the cluster's ganglia address into hadoop-metrics2.properties.tmpl.

  Returns:
    The substituted template text as a string.
  """
  # Keep the lookup even though the value is unused: it raises KeyError
  # early for an unknown job_name.
  job = args.yarn_config.jobs[job_name]
  supervisor_client = deploy_utils.get_supervisor_client(
      host, "yarn", args.yarn_config.cluster.name, job_name,
      instance_id=instance_id)
  # "# " comments out the ganglia lines in the template when no ganglia
  # address is configured for the cluster.
  ganglia_switch = "# "
  if args.yarn_config.cluster.ganglia_address:
    ganglia_switch = ""
  config_dict = {
    "job_name": job_name,
    "period": 10,
    "data_dir": supervisor_client.get_log_dir(),
    "ganglia_address": args.yarn_config.cluster.ganglia_address,
    "ganglia_switch": ganglia_switch,
  }
  local_path = "%s/hadoop-metrics2.properties.tmpl" % deploy_utils.get_template_dir()
  # Use a context manager so the template file handle is closed promptly;
  # the original relied on GC finalization to close it.
  with open(local_path, "r") as template_file:
    template = deploy_utils.Template(template_file.read())
  return template.substitute(config_dict)
def generate_start_script(args, host, job_name):
  """Render start.sh with the ' start' action appended to the params."""
  script_params = generate_run_scripts_params(args, host, job_name)
  script_params["params"] += " start"
  template_path = "%s/start.sh.tmpl" % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(template_path, script_params)
def generate_zookeeper_config(args):
  """Render zookeeper.cfg from its template and the generated config dict.

  Returns:
    The substituted template text as a string.
  """
  config_dict = args.zookeeper_config.configuration.generated_files["zookeeper.cfg"]
  local_path = "%s/zookeeper.cfg.tmpl" % deploy_utils.get_template_dir()
  # Use a context manager so the template file handle is closed promptly;
  # the original relied on GC finalization to close it.
  with open(local_path, "r") as template_file:
    template = deploy_utils.Template(template_file.read())
  return template.substitute(config_dict)
def generate_start_script(args, host, job_name):
  """Render start.sh for the given host/job from the shared template."""
  template_path = '%s/start.sh.tmpl' % deploy_utils.get_template_dir()
  return deploy_utils.create_run_script(
      template_path, generate_run_scripts_params(args, host, job_name))
def generate_start_script(args, host, job_name, host_id, instance_id):
  """Render start.sh for one job instance on the given host."""
  template_path = '%s/start.sh.tmpl' % deploy_utils.get_template_dir()
  script_params = generate_run_scripts_params(
      args, host, job_name, host_id, instance_id)
  return deploy_utils.create_run_script(template_path, script_params)
def generate_start_script(args, host, job_name, host_id, instance_id):
  """Render start.sh for one job instance on the given host."""
  script_params = generate_run_scripts_params(
      args, host, job_name, host_id, instance_id)
  return deploy_utils.create_run_script(
      "%s/start.sh.tmpl" % deploy_utils.get_template_dir(), script_params)