Example #1
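# Fragment of an Ambari HDFS params.py: names such as stack_version_formatted,
# security_enabled, dfs_dn_addr and hdfs_user are defined earlier in the module;
# the '******' string literals appear to have been masked by the example page.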
# hadoop parameters for stacks that support rolling_upgrade
if stack_version_formatted and check_stack_feature(
        StackFeature.ROLLING_UPGRADE, stack_version_formatted):
    mapreduce_libs_path = format(
        "{stack_root}/current/hadoop-mapreduce-client/*")

    if not security_enabled:
        hadoop_secure_dn_user = '******'
    else:
        dfs_dn_port = utils.get_port(dfs_dn_addr)
        dfs_dn_http_port = utils.get_port(dfs_dn_http_addr)
        dfs_dn_https_port = utils.get_port(dfs_dn_https_addr)
        # Avoid failing to start the DataNode as a non-root user when it is bound to root-owned (privileged) ports
        if dfs_http_policy == "HTTPS_ONLY":
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port)
        elif dfs_http_policy == "HTTP_AND_HTTPS":
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port)
        else:  # dfs_http_policy == "HTTP_ONLY" or not defined
            secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)
        if secure_dn_ports_are_in_use:
            hadoop_secure_dn_user = hdfs_user
        else:
            hadoop_secure_dn_user = '******'

ambari_libs_dir = "/var/lib/ambari-agent/lib"
limits_conf_dir = "/etc/security/limits.d"
hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
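The branching above rests on two small helpers from the script's local utils module. Their implementation is not part of this example; a minimal sketch of the assumed behavior (split the port off a host:port address, and treat ports below 1024 as root-owned) looks like this:

# Sketch of the assumed utils helpers -- illustrative only, the real module may differ.

def get_port(address):
    """Extract the port from a 'host:port' address, e.g. '0.0.0.0:1019' -> 1019.

    Returns None when the address carries no parseable port.
    """
    try:
        return int(address.split(':')[-1])
    except (AttributeError, ValueError):
        return None


def is_secure_port(port):
    """On *nix systems, ports below 1024 are root-owned ('secure')."""
    return port is not None and port < 1024

With dfs_dn_addr = '0.0.0.0:1019', for example, get_port yields 1019 and is_secure_port returns True, so the block above selects hadoop_secure_dn_user = hdfs_user and the DataNode is expected to be started through the secure (jsvc) path.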

Example #2

# hadoop parameters for 2.2+
if Script.is_hdp_stack_greater_or_equal("2.2"):
  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

  if not security_enabled:
    hadoop_secure_dn_user = '******'
  else:
    dfs_dn_port = utils.get_port(dfs_dn_addr)
    dfs_dn_http_port = utils.get_port(dfs_dn_http_addr)
    dfs_dn_https_port = utils.get_port(dfs_dn_https_addr)
    # Avoid failing to start the DataNode as a non-root user when it is bound to root-owned (privileged) ports
    if dfs_http_policy == "HTTPS_ONLY":
      secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port)
    elif dfs_http_policy == "HTTP_AND_HTTPS":
      secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port)
    else:  # dfs_http_policy == "HTTP_ONLY" or not defined
      secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)
    if secure_dn_ports_are_in_use:
      hadoop_secure_dn_user = hdfs_user
    else:
      hadoop_secure_dn_user = '******'

ambari_libs_dir = "/var/lib/ambari-agent/lib"
limits_conf_dir = "/etc/security/limits.d"

hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
hdfs_user_nproc_limit = default("/configurations/hadoop-env/hdfs_user_nproc_limit", "65536")
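default() is Ambari's lookup-with-fallback over the configuration tree: the first argument is a /-separated path into the configurations the agent passes to the script, the second the value returned when the key is missing. A minimal sketch of that behavior (an illustration, not Ambari's actual implementation):

def default(path, fallback):
    node = config  # assumed: the nested configurations dict, as in config = Script.get_config()
    for key in path.strip('/').split('/'):
        if not isinstance(node, dict) or key not in node:
            return fallback
        node = node[key]
    return node

The two limits fetched here are typically rendered into a file under limits_conf_dir (an hdfs.conf with nofile and nproc entries) so the HDFS user runs with raised ulimits.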