Example #1
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
    job = args.fds_config.jobs[job_name]

    supervisor_client = deploy_utils.get_supervisor_client(
        host,
        "fds",
        args.fds_config.cluster.name,
        job_name,
        instance_id=instance_id)

    artifact_and_version = "galaxy-fds-" + args.fds_config.cluster.version

    component_dir = "$package_dir"
    jar_dirs = "%s/lib/guava-11.0.2.jar:%s/:%s/lib/*" % (
        component_dir, component_dir, component_dir)
    log_level = deploy_utils.get_service_log_level(args, args.fds_config)

    params = job.get_arguments(args, args.fds_config.cluster,
                               args.fds_config.jobs,
                               args.fds_config.arguments_dict, job_name,
                               host_id, instance_id)

    script_dict = {
        "artifact": artifact_and_version,
        "job_name": job_name,
        "jar_dirs": jar_dirs,
        "run_dir": supervisor_client.get_run_dir(),
        "params": params,
    }

    return script_dict
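The dictionary returned above is not consumed directly; Example #4 below shows the companion step, where such a dict is passed to deploy_utils.create_run_script together with a start.sh.tmpl template. A minimal sketch of that rendering step, assuming a plain %(key)s-placeholder template (an assumption for illustration; the real template format is whatever create_run_script expects):

# Sketch only: renders a script_dict into a start script using old-style
# %-formatting. The placeholder syntax and the template body are assumptions,
# not the actual contents of start.sh.tmpl.
START_TEMPLATE = """#!/bin/sh
# artifact: %(artifact)s  job: %(job_name)s
run_dir=%(run_dir)s
exec java -cp "%(jar_dirs)s" %(params)s
"""

def render_start_script(script_dict):
    # %-formatting with a dict ignores keys the template does not reference.
    return START_TEMPLATE % script_dict

print(render_start_script({
    "artifact": "galaxy-fds-1.0",        # hypothetical version
    "job_name": "fds",
    "jar_dirs": "$package_dir/lib/*",
    "run_dir": "/var/run/fds",
    "params": "com.example.FdsServer",   # hypothetical main class
}))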
Example #2
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
  job = args.yarn_config.jobs[job_name]

  supervisor_client = deploy_utils.get_supervisor_client(host,
      "yarn", args.yarn_config.cluster.name, job_name, instance_id=instance_id)

  artifact_and_version = "hadoop-" + args.yarn_config.cluster.version

  jar_dirs = ""
  for component in ["common", "mapreduce", "yarn", "hdfs"]:
    if jar_dirs: jar_dirs += ":"
    component_dir = ("$package_dir/share/hadoop/%s" % component)
    jar_dirs += "%s/:%s/lib/*:%s/*" % (
        component_dir, component_dir, component_dir)

  service_env = ""
  for component_path in ["HADOOP_COMMON_HOME", "HADOOP_HDFS_HOME", "YARN_HOME"]:
    service_env += "export %s=$package_dir\n" % (component_path)
  log_level = deploy_utils.get_service_log_level(args, args.yarn_config)

  params = job.get_arguments(args, args.yarn_config.cluster, args.yarn_config.jobs,
    args.yarn_config.arguments_dict, job_name, host_id, instance_id)

  script_dict = {
      "artifact": artifact_and_version,
      "job_name": job_name,
      "jar_dirs": jar_dirs,
      "run_dir": supervisor_client.get_run_dir(),
      "service_env": service_env,
      "params": params,
  }

  return script_dict
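The loop above threads jar_dirs through manual ":" bookkeeping. The same Hadoop classpath can be built more compactly with str.join; a behaviorally identical sketch:

# Equivalent construction of the classpath built by the loop above,
# using str.join instead of manual separator handling.
components = ["common", "mapreduce", "yarn", "hdfs"]
jar_dirs = ":".join(
    "{0}/:{0}/lib/*:{0}/*".format("$package_dir/share/hadoop/" + c)
    for c in components)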
Example #3
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
  job = args.hdfs_config.jobs[job_name]

  supervisor_client = deploy_utils.get_supervisor_client(host,
      "hdfs", args.hdfs_config.cluster.name, job_name, instance_id=instance_id)

  artifact_and_version = "hadoop-" + args.hdfs_config.cluster.version

  jar_dirs = ""
  # must include both [dir]/ and [dir]/*: the wildcard picks up only the jars
  # under this dir, while the bare [dir]/ entry also makes the webapps under
  # it visible on the classpath.
  for component in ["common", "hdfs"]:
    if jar_dirs: jar_dirs += ":"
    component_dir = ("$package_dir/share/hadoop/%s" % component)
    jar_dirs += "%s/:%s/lib/*:%s/*" % (
        component_dir, component_dir, component_dir)
  log_level = deploy_utils.get_service_log_level(args, args.hdfs_config)

  params = job.get_arguments(args, args.hdfs_config.cluster, args.hdfs_config.jobs,
    args.hdfs_config.arguments_dict, job_name, host_id, instance_id)

  script_dict = {
      "artifact": artifact_and_version,
      "job_name": job_name,
      "jar_dirs": jar_dirs,
      "run_dir": supervisor_client.get_run_dir(),
      "params": params,
  }

  return script_dict
Example #4
def generate_start_script(args, host, job_name, host_id, instance_id):
  supervisor_client = deploy_utils.get_supervisor_client(host,
      "zookeeper", args.zookeeper_config.cluster.name, job_name, instance_id=instance_id)
  run_dir = supervisor_client.get_run_dir()

  artifact_and_version = "zookeeper-" + args.zookeeper_config.cluster.version
  component_dir = "$package_dir"
  # must include both [dir]/ and [dir]/*: the wildcard picks up only the jars
  # under this dir, while the bare [dir]/ entry also makes the webapps under
  # it visible on the classpath.
  jar_dirs = "%s/:%s/lib/*:%s/*" % (component_dir, component_dir, component_dir)
  job = args.zookeeper_config.jobs["zookeeper"]
  log_level = deploy_utils.get_service_log_level(args, args.zookeeper_config)

  params = job.get_arguments(args, args.zookeeper_config.cluster, args.zookeeper_config.jobs,
    args.zookeeper_config.arguments_dict, job_name, host_id, instance_id)

  script_dict = {
      "artifact": artifact_and_version,
      "job_name": job_name,
      "jar_dirs": jar_dirs,
      "run_dir": run_dir,
      "params": params,
  }

  return deploy_utils.create_run_script(
      '%s/start.sh.tmpl' % deploy_utils.get_template_dir(),
      script_dict)
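Example #4 also shows the second half of the flow that the generate_run_scripts_params variants leave implicit: the parameter dict is rendered through the service's start.sh.tmpl. Hypothetical glue (not from the source) tying the two steps together:

# Hypothetical wrapper, not present in the source: build the parameter
# dict for a job, then render it through the shared start-script template.
def start_script_for(args, host, job_name, host_id, instance_id):
  script_dict = generate_run_scripts_params(
      args, host, job_name, host_id, instance_id)
  return deploy_utils.create_run_script(
      '%s/start.sh.tmpl' % deploy_utils.get_template_dir(), script_dict)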
Example #5
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
    job = args.hbase_config.jobs[job_name]

    supervisor_client = deploy_utils.get_supervisor_client(
        host,
        "hbase",
        args.hbase_config.cluster.name,
        job_name,
        instance_id=instance_id)

    artifact_and_version = "hbase-" + args.hbase_config.cluster.version

    component_dir = "$package_dir"
    # must include both [dir]/ and [dir]/*: the wildcard picks up only the
    # jars under this dir, while the bare [dir]/ entry also makes the
    # webapps under it visible on the classpath.
    jar_dirs = "%s/:%s/lib/*:%s/*" % (component_dir, component_dir,
                                      component_dir)
    log_level = deploy_utils.get_service_log_level(args, args.hbase_config)

    params = job.get_arguments(args, args.hbase_config.cluster,
                               args.hbase_config.jobs,
                               args.hbase_config.arguments_dict, job_name,
                               host_id, instance_id)

    script_dict = {
        "artifact": artifact_and_version,
        "job_name": job_name,
        "jar_dirs": jar_dirs,
        "run_dir": supervisor_client.get_run_dir(),
        "params": params,
    }

    return script_dict
Example #6
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
    supervisor_client = deploy_utils.get_supervisor_client(
        host,
        "impala",
        args.impala_config.cluster.name,
        job_name,
        instance_id=instance_id)
    job = args.impala_config.jobs[job_name]

    artifact_and_version = "impala-" + args.impala_config.cluster.version
    log_level = deploy_utils.get_service_log_level(args, args.impala_config)

    params = job.get_arguments(args, args.impala_config.cluster,
                               args.impala_config.jobs,
                               args.impala_config.arguments_dict, job_name,
                               host_id, instance_id)

    script_dict = {
        "artifact": artifact_and_version,
        "job_name": job_name,
        "run_dir": supervisor_client.get_run_dir(),
        "ticket_cache": "$run_dir/impala.tc",
        "log_level": log_level,
        "params": params,
    }

    return script_dict
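Note that Impala is the only service here whose script_dict omits jar_dirs; instead it carries a ticket-cache path ($run_dir/impala.tc, presumably a Kerberos ticket cache) and is also the only example that forwards the computed log_level, which the other snippets compute but never place in the returned dictionary.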
Example #7
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
    job = args.kafka_config.jobs[job_name]

    supervisor_client = deploy_utils.get_supervisor_client(
        host,
        "kafka",
        args.kafka_config.cluster.name,
        job_name,
        instance_id=instance_id)

    artifact_and_version = "kafka-" + args.kafka_config.cluster.version

    jar_dirs = "$package_dir/*"
    log_level = deploy_utils.get_service_log_level(args, args.kafka_config)

    params = job.get_arguments(args, args.kafka_config.cluster,
                               args.kafka_config.jobs,
                               args.kafka_config.arguments_dict, job_name,
                               host_id, instance_id)

    script_dict = {
        "artifact": artifact_and_version,
        "job_name": job_name,
        "jar_dirs": jar_dirs,
        "run_dir": supervisor_client.get_run_dir(),
        "params": params,
    }

    return script_dict
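Kafka needs no per-component classpath assembly: a single $package_dir/* wildcard serves as jar_dirs, and the returned dictionary otherwise matches the FDS and HBase examples.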
Example #8
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
  job = args.storm_config.jobs[job_name]

  supervisor_client = deploy_utils.get_supervisor_client(host,
      "storm", args.storm_config.cluster.name, job_name, instance_id=instance_id)

  artifact_and_version = "storm-" + args.storm_config.cluster.version

  jar_dirs = "$package_dir/*"
  log_level = deploy_utils.get_service_log_level(args, args.storm_config)

  params = job.get_arguments(args, args.storm_config.cluster, args.storm_config.jobs,
    args.storm_config.arguments_dict, job_name, host_id, instance_id)

  service_env = "export SUPERVISOR_LOG_DIR=%s" % deploy_utils.get_supervisor_client(host,
    "storm", args.storm_config.cluster.name, 'supervisor', instance_id=instance_id).get_log_dir()

  script_dict = {
      "artifact": artifact_and_version,
      "job_name": job_name,
      "jar_dirs": jar_dirs,
      "run_dir": supervisor_client.get_run_dir(),
      "service_env": service_env,
      "params": params,
  }

  return script_dict
Example #9
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
  job = args.storm_config.jobs[job_name]

  supervisor_client = deploy_utils.get_supervisor_client(host,
      "storm", args.storm_config.cluster.name, job_name, instance_id=instance_id)

  artifact_and_version = "apache-storm-" + args.storm_config.cluster.version

  component_dir = "$package_dir"
  jar_dirs = "%s/:%s/lib/*:%s/*" % (component_dir, component_dir, component_dir)
  log_level = deploy_utils.get_service_log_level(args, args.storm_config)

  params = job.get_arguments(args, args.storm_config.cluster, args.storm_config.jobs,
    args.storm_config.arguments_dict, job_name, host_id, instance_id)

  service_env = "export SUPERVISOR_LOG_DIR=%s" % deploy_utils.get_supervisor_client(host,
    "storm", args.storm_config.cluster.name, 'supervisor', instance_id=instance_id).get_log_dir()

  script_dict = {
      "artifact": artifact_and_version,
      "job_name": job_name,
      "jar_dirs": jar_dirs,
      "run_dir": supervisor_client.get_run_dir(),
      "service_env": service_env,
      "params": params,
  }

  return script_dict
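Examples #8 and #9 cover two different Storm packagings: the former builds the artifact name with the storm- prefix and uses the flat $package_dir/* classpath, while the latter targets apache-storm- and additionally puts $package_dir/ and $package_dir/lib/* on the classpath. Both export SUPERVISOR_LOG_DIR by querying the supervisor client of the supervisor job.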