def run_shell(args):
  get_zk_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  args.zookeeper_config.parse_generated_config_files(args)

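  # Write the generated client JAAS config to a temporary file; it is handed
  # to the JVM below when security is enabled.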
  client_jaas = generate_client_jaas_config(args)
  jaas_fd, jaas_file = tempfile.mkstemp(suffix='zookeeper')
  os.write(jaas_fd, client_jaas)
  os.close(jaas_fd)
  zookeeper_opts = list()
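  # When security is enabled, point the JVM at the JAAS login config and the
  # cluster's Kerberos configuration.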
  if deploy_utils.is_security_enabled(args):
    zookeeper_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)
    zookeeper_opts.append(
      "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
      deploy_utils.get_config_dir())

  package_root = deploy_utils.get_artifact_package_root(args,
      args.zookeeper_config.cluster, "zookeeper")
  class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root, package_root)

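  # Build the ZooKeeper connect string (host:port) for this cluster.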
  zk_address = "%s:%d" % (
      deploy_utils.get_zk_address(args.zookeeper_config.cluster.name),
      args.zookeeper_config.jobs["zookeeper"].base_port)

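  # Run the shell's main class in a child JVM, inheriting stdout and stderr.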
  cmd = (["java", "-cp", class_path] + zookeeper_opts + [main_class,
      "-server", zk_address] + options)
  p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
  p.wait()
Example 3
 def __init__(self, args, options):
   # Parse collector config.
   config_path = os.path.join(deploy_utils.get_config_dir(), 'owl/collector.cfg')
   self.args = args
   self.options = options
   self.config = self.parse_config_file(config_path)
   self.services = {}
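   # Create a CollectorConfig.Service entry for every service name listed
   # under the [collector] section of the config file.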
   for service_name in self.config.get("collector", "services").split():
     self.services[service_name] = CollectorConfig.Service(
         options, self.config, service_name)
   self.period = self.config.getint("collector", "period")
Example 4
 def __init__(self, args, options):
   # Parse collector config.
   self.options = options
   config_path = os.path.join(deploy_utils.get_config_dir(), 'owl', self.options['collector_cfg'])
   self.args = args
   self.config = self.parse_config_file(config_path)
   self.services = {}
   for service_name in self.config.get("collector", "services").split():
     self.services[service_name] = CollectorConfig.Service(
         options, self.config, service_name)
   self.period = self.config.getint("collector", "period")
Example 5
def run_shell(args):
    get_hbase_service_config(args)

    main_class, options = deploy_utils.parse_shell_command(
        args, SHELL_COMMAND_INFO)
    if not main_class:
        return

    # Parse the service config, assuming the instance_id is -1.
    args.hbase_config.parse_generated_config_files(args)
    core_site_dict = args.hbase_config.configuration.generated_files[
        "core-site.xml"]
    hdfs_site_dict = args.hbase_config.configuration.generated_files[
        "hdfs-site.xml"]
    hbase_site_dict = args.hbase_config.configuration.generated_files[
        "hbase-site.xml"]

    hbase_opts = list()
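    # Pass each property from the generated site files to the JVM as a -D
    # option with the Hadoop property prefix.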
    for key, value in core_site_dict.iteritems():
        hbase_opts.append("-D%s%s=%s" %
                          (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))
    for key, value in hdfs_site_dict.iteritems():
        hbase_opts.append("-D%s%s=%s" %
                          (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))
    for key, value in hbase_site_dict.iteritems():
        hbase_opts.append("-D%s%s=%s" %
                          (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))

    if deploy_utils.is_security_enabled(args):
        hbase_opts.append("-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
                          deploy_utils.get_config_dir())

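        # HBase authenticates to ZooKeeper, so also write a ZooKeeper client
        # JAAS config to a temporary file.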
        (jaas_fd, jaas_file) = tempfile.mkstemp()
        args.zookeeper_config.parse_generated_config_files(args)
        os.write(jaas_fd, deploy_zookeeper.generate_client_jaas_config(args))
        os.close(jaas_fd)
        hbase_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)

    package_root = deploy_utils.get_artifact_package_root(
        args, args.hbase_config.cluster, "hbase")
    class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root,
                                        package_root)

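    # The interactive "shell" subcommand additionally loads hirb.rb from the
    # HBase package; other subcommands just run the main class directly.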
    cmd = ["java", "-cp", class_path] + hbase_opts + [main_class]
    if args.command[0] == "shell":
        cmd += ["-X+O", "%s/bin/hirb.rb" % package_root]
    cmd += options
    p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
    return p.wait()
Example 6
 def init_kerb_user_map(self):
   res = {}
   config_path = deploy_utils.get_config_dir()
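   # Load the Kerberos-user mapping file; lines starting with '#' are comments.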
   with open(path.join(config_path, KERBEROS_IDS_PATH)) as f:
     for line in f:
       if line.startswith('#'):
         continue
       try:
         # file format: kerb_user user1[ user2 user3]
         kerb_user, email_users = line.strip().split(' ', 1)
         if kerb_user in res:
           logger.warn('Duplicated kerb user config for user: %s' % kerb_user)
         res[kerb_user] = email_users
       except Exception as e:
         logger.warn('Failed to parse user config [%r]: %s' % (e, line))
   return res
Example 7
def run_shell(args):
    get_yarn_service_config(args)

    main_class, options = deploy_utils.parse_shell_command(
        args, SHELL_COMMAND_INFO)
    if not main_class:
        return

    core_site_dict = args.yarn_config.configuration.generated_files[
        "core-site.xml"]
    hdfs_site_dict = args.yarn_config.configuration.generated_files[
        "hdfs-site.xml"]
    mapred_site_dict = args.yarn_config.configuration.generated_files[
        "mapred-site.xml"]
    yarn_site_dict = args.yarn_config.configuration.generated_files[
        "yarn-site.xml"]

    hadoop_opts = list()
    for key, value in core_site_dict.iteritems():
        hadoop_opts.append("-D%s%s=%s" %
                           (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))
    for key, value in hdfs_site_dict.iteritems():
        hadoop_opts.append("-D%s%s=%s" %
                           (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))
    for key, value in mapred_site_dict.iteritems():
        hadoop_opts.append("-D%s%s=%s" %
                           (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))
    for key, value in yarn_site_dict.iteritems():
        hadoop_opts.append("-D%s%s=%s" %
                           (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))

    if deploy_utils.is_security_enabled(args):
        hadoop_opts.append("-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
                           deploy_utils.get_config_dir())

    package_root = deploy_utils.get_hadoop_package_root(
        args.yarn_config.cluster.version)
    lib_root = "%s/share/hadoop" % package_root
    class_path = "%s/etc/hadoop" % package_root
    for component in ["common", "hdfs", "mapreduce", "yarn"]:
        component_dir = "%s/%s" % (lib_root, component)
        class_path += ":%s/:%s/*:%s/lib/*" % (component_dir, component_dir,
                                              component_dir)

    cmd = (["java", "-cp", class_path] + hadoop_opts + [main_class] + options)
    p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
    p.wait()
Example 8
def run_shell(args):
  get_yarn_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  # Parse the service config, assuming the instance_id is -1.
  args.yarn_config.parse_generated_config_files(args)
  core_site_dict = args.yarn_config.configuration.generated_files["core-site.xml"]
  hdfs_site_dict = args.yarn_config.configuration.generated_files["hdfs-site.xml"]
  mapred_site_dict = args.yarn_config.configuration.generated_files["mapred-site.xml"]
  yarn_site_dict = args.yarn_config.configuration.generated_files["yarn-site.xml"]

  hadoop_opts = list()
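  # Forward every generated Hadoop property to the JVM as a -D option.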
  for key, value in core_site_dict.iteritems():
    hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))
  for key, value in hdfs_site_dict.iteritems():
    hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))
  for key, value in mapred_site_dict.iteritems():
    hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))
  for key, value in yarn_site_dict.iteritems():
    hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))

  if deploy_utils.is_security_enabled(args):
    hadoop_opts.append(
        "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

  package_root = deploy_utils.get_artifact_package_root(args,
      args.yarn_config.cluster, "hadoop")
  lib_root = "%s/share/hadoop" % package_root
  class_path = "%s/etc/hadoop" % package_root
  for component in ["common", "hdfs", "mapreduce", "yarn"]:
    component_dir = "%s/%s" % (lib_root, component)
    class_path += ":%s/:%s/*:%s/lib/*" % (component_dir,
        component_dir, component_dir)

  cmd = (["java", "-cp", class_path] + hadoop_opts +
      [main_class] + options)
  p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
  p.wait()
Example 9
def run_shell(args):
  get_hbase_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  # Parse the service config, assuming the instance_id is -1.
  args.hbase_config.parse_generated_config_files(args)
  core_site_dict = args.hbase_config.configuration.generated_files["core-site.xml"]
  hdfs_site_dict = args.hbase_config.configuration.generated_files["hdfs-site.xml"]
  hbase_site_dict = args.hbase_config.configuration.generated_files["hbase-site.xml"]

  hbase_opts = list()
  for key, value in core_site_dict.iteritems():
    hbase_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))
  for key, value in hdfs_site_dict.iteritems():
    hbase_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))
  for key, value in hbase_site_dict.iteritems():
    hbase_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))

  if deploy_utils.is_security_enabled(args):
    hbase_opts.append("-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

    (jaas_fd, jaas_file) = tempfile.mkstemp()
    args.zookeeper_config.parse_generated_config_files(args)
    os.write(jaas_fd, deploy_zookeeper.generate_client_jaas_config(args))
    os.close(jaas_fd)
    hbase_opts.append("-Djava.security.auth.login.config=%s" % jaas_file)

  package_root = deploy_utils.get_artifact_package_root(args,
      args.hbase_config.cluster, "hbase")
  class_path = "%s/:%s/lib/*:%s/*" % (package_root, package_root, package_root)

  cmd = ["java", "-cp", class_path] + hbase_opts + [main_class]
  if args.command[0] == "shell":
    cmd += ["-X+O", "%s/bin/hirb.rb" % package_root]
  cmd += options
  p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
  return p.wait()
Example 10
def run_shell(args):
    get_hdfs_service_config(args)

    main_class, options = deploy_utils.parse_shell_command(
        args, SHELL_COMMAND_INFO)
    if not main_class:
        return
    # Parse the service config, assuming the instance_id is -1.
    args.hdfs_config.parse_generated_config_files(args)
    core_site_dict = args.hdfs_config.configuration.generated_files[
        "core-site.xml"]
    hdfs_site_dict = args.hdfs_config.configuration.generated_files[
        "hdfs-site.xml"]

    hadoop_opts = list()
    for key, value in core_site_dict.iteritems():
        hadoop_opts.append("-D%s%s=%s" %
                           (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))
    for key, value in hdfs_site_dict.iteritems():
        hadoop_opts.append("-D%s%s=%s" %
                           (deploy_utils.HADOOP_PROPERTY_PREFIX, key, value))

    package_root = deploy_utils.get_artifact_package_root(
        args, args.hdfs_config.cluster, "hadoop")
    lib_root = "%s/share/hadoop" % package_root
    class_path = "%s/etc/hadoop" % package_root
    for component in ["common", "hdfs"]:
        component_dir = "%s/%s" % (lib_root, component)
        class_path += ":%s/:%s/*:%s/lib/*" % (component_dir, component_dir,
                                              component_dir)

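    # With security enabled, prepend the Hadoop security jar to the boot
    # classpath and point the JVM at the cluster's Kerberos configuration.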
    if deploy_utils.is_security_enabled(args):
        boot_class_path = "%s/common/lib/hadoop-security-%s.jar" % (
            lib_root, args.hdfs_config.cluster.version)
        hadoop_opts.append("-Xbootclasspath/p:%s" % boot_class_path)
        hadoop_opts.append("-Dkerberos.instance=hadoop")
        hadoop_opts.append("-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
                           deploy_utils.get_config_dir())

    cmd = (["java", "-cp", class_path] + hadoop_opts + [main_class] + options)
    p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
    p.wait()
Example 11
def run_shell(args):
  get_hdfs_service_config(args)

  main_class, options = deploy_utils.parse_shell_command(
      args, SHELL_COMMAND_INFO)
  if not main_class:
    return

  core_site_dict = args.hdfs_config.configuration.generated_files["core-site.xml"]
  hdfs_site_dict = args.hdfs_config.configuration.generated_files["hdfs-site.xml"]

  hadoop_opts = list()
  for key, value in core_site_dict.iteritems():
    hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))
  for key, value in hdfs_site_dict.iteritems():
    hadoop_opts.append("-D%s%s=%s" % (deploy_utils.HADOOP_PROPERTY_PREFIX,
          key, value))

  package_root = deploy_utils.get_hadoop_package_root(
      args.hdfs_config.cluster.version)
  lib_root = "%s/share/hadoop" % package_root
  class_path = "%s/etc/hadoop" % package_root
  for component in ["common", "hdfs"]:
    component_dir = "%s/%s" % (lib_root, component)
    class_path += ":%s/:%s/*:%s/lib/*" % (component_dir,
        component_dir, component_dir)

  if deploy_utils.is_security_enabled(args):
    boot_class_path = "%s/common/lib/hadoop-security-%s.jar" % (lib_root,
        args.hdfs_config.cluster.version)
    hadoop_opts.append("-Xbootclasspath/p:%s" % boot_class_path)
    hadoop_opts.append("-Dkerberos.instance=hadoop")
    hadoop_opts.append(
        "-Djava.security.krb5.conf=%s/krb5-hadoop.conf" %
        deploy_utils.get_config_dir())

  cmd = (["java", "-cp", class_path] + hadoop_opts +
      [main_class] + options)
  p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr)
  p.wait()