def start_job(args, host, job_name):
    """Start an HBase job on a host.

    Produces the job's config files and start script, builds its HTTP
    url from the job's base_port + 1, and delegates the actual launch
    to deploy_utils.start_job.
    """
    job_files = generate_configs(args, host, job_name)
    launch_script = generate_start_script(args, host, job_name)
    port = args.hbase_config.jobs[job_name].base_port + 1
    service_url = 'http://%s:%d' % (host, port)
    deploy_utils.start_job(
        args, "hbase", "hbase", args.hbase_config,
        host, job_name, launch_script, service_url, **job_files)
def start_job(args, host, job_name):
    """Start a YARN job on a host.

    Produces the job's config files and start script, builds its HTTP
    url from the job's base_port + 1, and delegates the launch to
    deploy_utils.start_job.
    """
    job_files = generate_configs(args, host, job_name)
    launch_script = generate_start_script(args, host, job_name)
    port = args.yarn_config.jobs[job_name].base_port + 1
    service_url = 'http://%s:%d' % (host, port)
    deploy_utils.start_job(
        args, "hadoop", "yarn", args.yarn_config,
        host, job_name, launch_script, service_url, **job_files)
def start_job(args, host, job_name):
    """Start an Impala job on a host.

    Produces the service config files (not host/job specific here) and
    the job's start script, builds the HTTP url from the job's
    base_port + 1, and delegates the launch to deploy_utils.start_job.
    """
    job_files = generate_configs(args)
    launch_script = generate_start_script(args, host, job_name)
    port = args.impala_config.jobs[job_name].base_port + 1
    service_url = "http://%s:%d" % (host, port)
    deploy_utils.start_job(
        args, "impala", "impala", args.impala_config,
        host, job_name, launch_script, service_url, **job_files)
def start_job(args, host, job_name):
    """Start a ZooKeeper job on a host.

    Produces the run scripts and the job's start script, then delegates
    the launch to deploy_utils.start_job. ZooKeeper exposes no HTTP url
    here, so an empty string is passed.
    """
    job_files = generate_run_scripts(args)
    launch_script = generate_start_script(args, host, job_name)
    service_url = ""
    deploy_utils.start_job(
        args, "zookeeper", "zookeeper", args.zookeeper_config,
        host, job_name, launch_script, service_url, **job_files)
def start_job(args, host, job_name, host_id, instance_id):
    """Start a ZooKeeper job instance on a host.

    First re-parses the generated config files for this particular
    instance, then produces the run scripts and start script and
    delegates the launch to deploy_utils.start_job. No HTTP url is
    exposed for ZooKeeper, so an empty string is passed.
    """
    # Bind the service config to this specific instance before
    # generating anything.
    args.zookeeper_config.parse_generated_config_files(
        args, job_name, host_id, instance_id)
    job_files = generate_run_scripts(args)
    launch_script = generate_start_script(
        args, host, job_name, host_id, instance_id)
    service_url = ''
    deploy_utils.start_job(
        args, "zookeeper", "zookeeper", args.zookeeper_config,
        host, job_name, instance_id, launch_script, service_url,
        **job_files)
def start_job(args, host, job_name, host_id, instance_id):
    """Start an Impala job instance on a host.

    First re-parses the generated config files for this particular
    instance, then produces the config files and start script, derives
    the instance's HTTP service uri, and delegates the launch to
    deploy_utils.start_job.
    """
    # Bind the service config to this specific instance before
    # generating anything.
    args.impala_config.parse_generated_config_files(
        args, job_name, host_id, instance_id)
    job_files = generate_configs(args)
    launch_script = generate_start_script(
        args, host, job_name, host_id, instance_id)
    service_url = deploy_utils.get_http_service_uri(
        host, args.impala_config.jobs[job_name].base_port, instance_id)
    deploy_utils.start_job(
        args, "impala", "impala", args.impala_config,
        host, job_name, instance_id, launch_script, service_url,
        **job_files)
def start_job(args, host, job_name, host_id, instance_id):
    """Start a YARN job instance on a host.

    First re-parses the generated config files for this particular
    instance, then produces the config files and start script, derives
    the instance's HTTP service uri, and delegates the launch to
    deploy_utils.start_job.
    """
    # Bind the service config to this specific instance before
    # generating anything.
    args.yarn_config.parse_generated_config_files(
        args, job_name, host_id, instance_id)
    job_files = generate_configs(args, host, job_name, instance_id)
    launch_script = generate_start_script(
        args, host, job_name, host_id, instance_id)
    service_url = deploy_utils.get_http_service_uri(
        host, args.yarn_config.jobs[job_name].base_port, instance_id)
    deploy_utils.start_job(
        args, "hadoop", "yarn", args.yarn_config,
        host, job_name, instance_id, launch_script, service_url,
        **job_files)
def start_job(args, host, job_name):
    """Start an HDFS job on a host.

    Produces the job's start script and (unless config generation is
    skipped via args.skip_gen_config_files) its config files, builds
    the HTTP url from the job's base_port + 1, and delegates the
    launch to deploy_utils.start_job.
    """
    launch_script = generate_start_script(args, host, job_name)
    port = args.hdfs_config.jobs[job_name].base_port + 1
    service_url = 'http://%s:%d' % (host, port)
    if args.skip_gen_config_files:
        job_files = dict()
    else:
        job_files = generate_configs(args, host, job_name)
    deploy_utils.start_job(
        args, "hadoop", "hdfs", args.hdfs_config,
        host, job_name, launch_script, service_url, **job_files)
def start_job(args, host, job_name, host_id, instance_id):
    """Start an HDFS job instance on a host.

    First re-parses the generated config files for this particular
    instance, then produces the start script and (unless config
    generation is skipped via args.skip_gen_config_files) the config
    files, derives the instance's HTTP service uri, and delegates the
    launch to deploy_utils.start_job.
    """
    # Bind the service config to this specific instance before
    # generating anything.
    args.hdfs_config.parse_generated_config_files(
        args, job_name, host_id, instance_id)
    launch_script = generate_start_script(
        args, host, job_name, host_id, instance_id)
    service_url = deploy_utils.get_http_service_uri(
        host, args.hdfs_config.jobs[job_name].base_port, instance_id)
    if args.skip_gen_config_files:
        job_files = dict()
    else:
        job_files = generate_configs(args, host, job_name, instance_id)
    deploy_utils.start_job(
        args, "hadoop", "hdfs", args.hdfs_config,
        host, job_name, instance_id, launch_script, service_url,
        **job_files)
def start_job(args, host, job_name, host_id, instance_id, is_wait=False):
    """Start an HBase job instance on a host.

    When is_wait is True, first blocks until the job is observed as
    stopped. Then re-parses the generated config files for this
    particular instance, produces the config files and start script,
    derives the instance's HTTP service uri, and delegates the launch
    to deploy_utils.start_job.
    """
    if is_wait:
        deploy_utils.wait_for_job_stopping(
            "hbase", args.hbase_config.cluster.name,
            job_name, host, instance_id)
    # Bind the service config to this specific instance before
    # generating anything.
    args.hbase_config.parse_generated_config_files(
        args, job_name, host_id, instance_id)
    job_files = generate_configs(args, host, job_name, instance_id)
    launch_script = generate_start_script(
        args, host, job_name, host_id, instance_id)
    service_url = deploy_utils.get_http_service_uri(
        host, args.hbase_config.jobs[job_name].base_port, instance_id)
    deploy_utils.start_job(
        args, "hbase", "hbase", args.hbase_config,
        host, job_name, instance_id, launch_script, service_url,
        **job_files)
def start_job(args, host, job_name, host_id, instance_id, is_wait=False):
    """Start a Storm job instance on a host.

    When is_wait is True, first blocks until the job is observed as
    stopped. Then re-parses the generated config files for this
    particular instance, produces the config files and start script,
    derives the instance's HTTP service uri, and delegates the launch
    to deploy_utils.start_job.
    """
    if is_wait:
        deploy_utils.wait_for_job_stopping(
            "storm", args.storm_config.cluster.name,
            job_name, host, instance_id)
    # Bind the service config to this specific instance before
    # generating anything.
    args.storm_config.parse_generated_config_files(
        args, job_name, host_id, instance_id)
    job_files = generate_configs(args, host, job_name, instance_id)
    launch_script = generate_start_script(
        args, host, job_name, host_id, instance_id)
    service_url = deploy_utils.get_http_service_uri(
        host, args.storm_config.jobs[job_name].base_port, instance_id)
    deploy_utils.start_job(
        args, "apache-storm", "storm", args.storm_config,
        host, job_name, instance_id, launch_script, service_url,
        **job_files)
def start_job(args, host, job_name):
    """Start a ZooKeeper job on a host.

    Produces the run scripts and the job's start script, then delegates
    the launch to deploy_utils.start_job. ZooKeeper exposes no HTTP url
    here, so an empty string is passed.
    """
    job_files = generate_run_scripts(args)
    launch_script = generate_start_script(args, host, job_name)
    service_url = ''
    deploy_utils.start_job(
        args, "zookeeper", "zookeeper", args.zookeeper_config,
        host, job_name, launch_script, service_url, **job_files)