def run_shell(args):
  """Run the ZooKeeper shell command against the configured cluster.

  Builds a java command line (classpath plus optional Kerberos/JAAS
  security options) and executes the parsed shell main class, forwarding
  the child's stdout/stderr.

  Returns:
    The child process exit status, or None when the command is unknown.
  """
  get_zk_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  args.zookeeper_config.parse_generated_config_files(args)

  # Write the client JAAS config to a temp file so the JVM can read it.
  client_jaas = generate_client_jaas_config(args)
  jaas_fd, jaas_file = tempfile.mkstemp(suffix='zookeeper')
  os.write(jaas_fd, client_jaas)
  os.close(jaas_fd)
  try:
    zookeeper_opts = []
    if deploy_utils.is_security_enabled(args):
      zookeeper_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)
      zookeeper_opts.append(
        "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

    package_root = deploy_utils.get_artifact_package_root(args,
        args.zookeeper_config.cluster, "zookeeper")
    class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root, package_root)

    zk_address = "%s:%d" % (
        deploy_utils.get_zk_address(args.zookeeper_config.cluster.name),
        args.zookeeper_config.jobs["zookeeper"].base_port)

    cmd = (["java", "-cp", class_path] + zookeeper_opts + [main_class,
        "-server", zk_address] + options)
    p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
    # Propagate the exit status so callers can detect failure (the HBase
    # run_shell variant already returns it).
    return p.wait()
  finally:
    # The JAAS temp file is only needed while the JVM runs; clean it up
    # instead of leaking one file per invocation.
    try:
      os.remove(jaas_file)
    except OSError:
      pass
# Example #2
def run_shell(args):
  """Run the ZooKeeper shell command against the configured cluster.

  Builds a java command line (classpath plus optional Kerberos/JAAS
  security options) and executes the parsed shell main class, forwarding
  the child's stdout/stderr.

  Returns:
    The child process exit status, or None when the command is unknown.
  """
  get_zk_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  args.zookeeper_config.parse_generated_config_files(args)

  # Write the client JAAS config to a temp file so the JVM can read it.
  client_jaas = generate_client_jaas_config(args)
  jaas_fd, jaas_file = tempfile.mkstemp(suffix='zookeeper')
  os.write(jaas_fd, client_jaas)
  os.close(jaas_fd)
  try:
    zookeeper_opts = []
    if deploy_utils.is_security_enabled(args):
      zookeeper_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)
      zookeeper_opts.append(
        "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

    package_root = deploy_utils.get_artifact_package_root(args,
        args.zookeeper_config.cluster, "zookeeper")
    class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root, package_root)

    zk_address = "%s:%d" % (
        deploy_utils.get_zk_address(args.zookeeper_config.cluster.name),
        args.zookeeper_config.jobs["zookeeper"].base_port)

    cmd = (["java", "-cp", class_path] + zookeeper_opts + [main_class,
        "-server", zk_address] + options)
    p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
    # Propagate the exit status so callers can detect failure (the HBase
    # run_shell variant already returns it).
    return p.wait()
  finally:
    # The JAAS temp file is only needed while the JVM runs; clean it up
    # instead of leaking one file per invocation.
    try:
      os.remove(jaas_file)
    except OSError:
      pass
def recover_region_server(args, ip, port):
  """Move regions back onto the region server at ip:port via region_mover.rb.

  Logs and aborts (Log.print_critical) when the load command fails.
  """
  package_root = deploy_utils.get_artifact_package_root(args,
      args.hbase_config.cluster, "hbase")
  Log.print_info("Recover region server: " + ip)
  # Prefer the reverse-DNS hostname; fall back to the raw IP when the
  # address cannot be resolved (matches the other recover_region_server
  # variant in this file).
  try:
    host = socket.gethostbyaddr(ip)[0]
  except socket.error:
    host = ip
  args.command = ["ruby", "%s/bin/region_mover.rb" % package_root,
    "load", "%s:%d" % (host, port)]
  if run_shell(args) != 0:
    Log.print_critical("Load host %s failed." % host)
# Example #4
def run_shell(args):
    """Run the HBase shell command against the configured cluster.

    Collects the generated Hadoop/HBase site properties as -D options,
    adds Kerberos/JAAS settings when security is enabled, and executes the
    parsed shell main class in a java subprocess.

    Returns:
      The child process exit status, or None when the command is unknown.
    """
    get_hbase_service_config(args)

    main_class, options = deploy_utils.parse_shell_command(
        args, SHELL_COMMAND_INFO)
    if not main_class:
        return

    # parse the service_config, suppose the instance_id is -1
    args.hbase_config.parse_generated_config_files(args)
    generated_files = args.hbase_config.configuration.generated_files

    # All three site files are flattened into -D<prefix><key>=<value> opts.
    hbase_opts = []
    for site_dict in (generated_files["core-site.xml"],
                      generated_files["hdfs-site.xml"],
                      generated_files["hbase-site.xml"]):
        for key, value in site_dict.iteritems():
            hbase_opts.append("-D%s%s=%s" %
                              (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))

    jaas_file = None
    if deploy_utils.is_security_enabled(args):
        hbase_opts.append("-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
                          deploy_utils.get_config_dir())

        # Write the client JAAS config to a temp file for the JVM to read.
        (jaas_fd, jaas_file) = tempfile.mkstemp()
        args.zookeeper_config.parse_generated_config_files(args)
        os.write(jaas_fd, deploy_zookeeper.generate_client_jaas_config(args))
        os.close(jaas_fd)
        hbase_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)

    try:
        package_root = deploy_utils.get_artifact_package_root(
            args, args.hbase_config.cluster, "hbase")
        class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root,
                                            package_root)

        cmd = ["java", "-cp", class_path] + hbase_opts + [main_class]
        if args.command[0] == "shell":
            # The interactive shell needs jruby's hirb.rb as script argument.
            cmd += ["-X+O", "%s/bin/hirb.rb" % package_root]
        cmd += options
        p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
        return p.wait()
    finally:
        # Remove the temp JAAS file (if one was created) instead of leaking
        # one file per invocation.
        if jaas_file:
            try:
                os.remove(jaas_file)
            except OSError:
                pass
# Example #5
def recover_region_server(args, ip, port):
    """Move regions back onto the region server at ip:port via region_mover.rb.

    Logs and aborts (Log.print_critical) when the load command fails.
    """
    package_root = deploy_utils.get_artifact_package_root(
        args, args.hbase_config.cluster, "hbase")
    Log.print_info("Recover region server: " + ip)
    # Prefer the reverse-DNS hostname; fall back to the raw IP when the
    # address cannot be resolved. Catch only socket errors — the original
    # bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    try:
        host = socket.gethostbyaddr(ip)[0]
    except socket.error:
        host = ip
    args.command = [
        "ruby",
        "%s/bin/region_mover.rb" % package_root, "load",
        "%s:%d" % (host, port)
    ]
    if run_shell(args) != 0:
        Log.print_critical("Load host %s failed." % host)
# Example #6
def run_shell(args):
  """Run the YARN shell command against the configured cluster.

  Collects the generated Hadoop site properties as -D options, adds the
  Kerberos config when security is enabled, assembles the Hadoop
  component classpath, and executes the parsed shell main class.

  Returns:
    The child process exit status, or None when the command is unknown.
  """
  get_yarn_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  # parse the service_config, suppose the instance_id is -1
  args.yarn_config.parse_generated_config_files(args)
  generated_files = args.yarn_config.configuration.generated_files

  # All four site files are flattened into -D<prefix><key>=<value> opts.
  hadoop_opts = []
  for site_dict in (generated_files["core-site.xml"],
                    generated_files["hdfs-site.xml"],
                    generated_files["mapred-site.xml"],
                    generated_files["yarn-site.xml"]):
    for key, value in site_dict.iteritems():
      hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
            key, value))

  if deploy_utils.is_security_enabled(args):
    hadoop_opts.append(
        "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

  package_root = deploy_utils.get_artifact_package_root(args,
      args.yarn_config.cluster, "hadoop")
  lib_root = "%s/share/hadoop" % package_root
  class_path = "%s/etc/hadoop" % package_root
  for component in ["common", "hdfs", "mapreduce", "yarn"]:
    component_dir = "%s/%s" % (lib_root, component)
    class_path += ":%s/:%s/*:%s/lib/*" % (component_dir,
        component_dir, component_dir)

  cmd = (["java", "-cp", class_path] + hadoop_opts +
      [main_class] + options)
  p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
  # Propagate the exit status so callers can detect failure (the HBase
  # run_shell variant already returns it).
  return p.wait()
# Example #7
def run_shell(args):
  """Run the YARN shell command against the configured cluster.

  Collects the generated Hadoop site properties as -D options, adds the
  Kerberos config when security is enabled, assembles the Hadoop
  component classpath, and executes the parsed shell main class.

  Returns:
    The child process exit status, or None when the command is unknown.
  """
  get_yarn_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  # parse the service_config, suppose the instance_id is -1
  args.yarn_config.parse_generated_config_files(args)
  generated_files = args.yarn_config.configuration.generated_files

  # All four site files are flattened into -D<prefix><key>=<value> opts.
  hadoop_opts = []
  for site_dict in (generated_files["core-site.xml"],
                    generated_files["hdfs-site.xml"],
                    generated_files["mapred-site.xml"],
                    generated_files["yarn-site.xml"]):
    for key, value in site_dict.iteritems():
      hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
            key, value))

  if deploy_utils.is_security_enabled(args):
    hadoop_opts.append(
        "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

  package_root = deploy_utils.get_artifact_package_root(args,
      args.yarn_config.cluster, "hadoop")
  lib_root = "%s/share/hadoop" % package_root
  class_path = "%s/etc/hadoop" % package_root
  for component in ["common", "hdfs", "mapreduce", "yarn"]:
    component_dir = "%s/%s" % (lib_root, component)
    class_path += ":%s/:%s/*:%s/lib/*" % (component_dir,
        component_dir, component_dir)

  cmd = (["java", "-cp", class_path] + hadoop_opts +
      [main_class] + options)
  p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
  # Propagate the exit status so callers can detect failure (the HBase
  # run_shell variant already returns it).
  return p.wait()
def run_shell(args):
  """Run the HBase shell command against the configured cluster.

  Collects the generated Hadoop/HBase site properties as -D options, adds
  Kerberos/JAAS settings when security is enabled, and executes the parsed
  shell main class in a java subprocess.

  Returns:
    The child process exit status, or None when the command is unknown.
  """
  get_hbase_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  # parse the service_config, suppose the instance_id is -1
  args.hbase_config.parse_generated_config_files(args)
  generated_files = args.hbase_config.configuration.generated_files

  # All three site files are flattened into -D<prefix><key>=<value> opts.
  hbase_opts = []
  for site_dict in (generated_files["core-site.xml"],
                    generated_files["hdfs-site.xml"],
                    generated_files["hbase-site.xml"]):
    for key, value in site_dict.iteritems():
      hbase_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
            key, value))

  jaas_file = None
  if deploy_utils.is_security_enabled(args):
    hbase_opts.append("-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

    # Write the client JAAS config to a temp file for the JVM to read.
    (jaas_fd, jaas_file) = tempfile.mkstemp()
    args.zookeeper_config.parse_generated_config_files(args)
    os.write(jaas_fd, deploy_zookeeper.generate_client_jaas_config(args))
    os.close(jaas_fd)
    hbase_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)

  try:
    package_root = deploy_utils.get_artifact_package_root(args,
        args.hbase_config.cluster, "hbase")
    class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root, package_root)

    cmd = ["java", "-cp", class_path] + hbase_opts + [main_class]
    if args.command[0] == "shell":
      # The interactive shell needs jruby's hirb.rb as script argument.
      cmd += ["-X+O", "%s/bin/hirb.rb" % package_root]
    cmd += options
    p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
    return p.wait()
  finally:
    # Remove the temp JAAS file (if one was created) instead of leaking
    # one file per invocation.
    if jaas_file:
      try:
        os.remove(jaas_file)
      except OSError:
        pass