Example #1
hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix

# Some datanode settings
dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
dfs_dn_http_addr = default(
    '/configurations/hdfs-site/dfs.datanode.http.address', None)
dfs_dn_https_addr = default(
    '/configurations/hdfs-site/dfs.datanode.https.address', None)
dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
dfs_dn_ipc_address = config['configurations']['hdfs-site'][
    'dfs.datanode.ipc.address']
secure_dn_ports_are_in_use = False

# hadoop default parameters
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
hadoop_bin = conf_select.get_hadoop_dir("sbin")
hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
hadoop_home = "/usr/lib/hadoop"
hadoop_secure_dn_user = hdfs_user
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hadoop_lib_home = conf_select.get_hadoop_dir("lib")

# hadoop parameters for 2.2+
if Script.is_hdp_stack_greater_or_equal("2.2"):
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
    hadoop_home = "/usr/hdp/current/hadoop-client"

    if not security_enabled:
        hadoop_secure_dn_user = '******'  # value masked in the source listing
    else:
        pass  # remainder of this branch is truncated in the source listing
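
Most of the lookups above go through resource_management's default() helper, which walks the merged configuration dict along a slash-separated path and returns the given fallback when any key along the path is missing. The sketch below is a minimal illustration of that lookup semantics, with an explicit config argument added for self-containment; it is not the library's actual implementation:

# Illustrative sketch only: a stand-in for resource_management's default(),
# showing the slash-path lookup semantics used in the example above.
def default(path, fallback, config=None):
    node = config or {}
    for key in path.strip('/').split('/'):
        if not isinstance(node, dict) or key not in node:
            return fallback
        node = node[key]
    return node

conf = {'configurations': {'hdfs-site': {'dfs.http.policy': 'HTTPS_ONLY'}}}
default('/configurations/hdfs-site/dfs.http.policy', None, conf)        # -> 'HTTPS_ONLY'
default('/configurations/hdfs-site/dfs.datanode.address', None, conf)   # -> None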
Example #2
  # Opening condition restored; it is truncated in the original listing.
  # Ambari branches on OS family here: Ubuntu names the MySQL service
  # 'mysql', while RHEL-family systems use 'mysqld'.
  if OSCheck.is_ubuntu_family():
    daemon_name = 'mysql'
  else:
    daemon_name = 'mysqld'

  # Security related/required params
  hostname = config['hostname']
  security_enabled = config['configurations']['cluster-env']['security_enabled']
  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
  tmp_dir = Script.get_tmp_dir()
  hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
  hive_user = config['configurations']['hive-env']['hive_user']
  webhcat_user = config['configurations']['hive-env']['webhcat_user']

  # default configuration directories
  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
  hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
  webhcat_conf_dir = '/etc/hive-webhcat/conf'
  hive_etc_dir_prefix = "/etc/hive"
  hive_conf_dir = "/etc/hive/conf"
  hive_client_conf_dir = "/etc/hive/conf"

  # !!! Required by Ranger to be at this location unless HDP 2.3+
  hive_server_conf_dir = "/etc/hive/conf.server"

  # HDP 2.2+
  if Script.is_hdp_stack_greater_or_equal("2.2"):
    webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/conf'
    hive_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
    hive_client_conf_dir = format("/usr/hdp/current/{component_directory}/conf")

  # HDP 2.3+ (remainder of this example is truncated in the source listing)
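
The HDP 2.2+ branch relies on resource_management's format(), which interpolates {component_directory} from the caller's scope. A plain str.format() call with an explicit variable produces the same path, as sketched below; the component name is a hypothetical value for illustration:

# Illustrative sketch only: resource_management's format() resolves names
# from the caller's scope; explicit str.format() shows the same expansion.
component_directory = 'hive-server2'   # hypothetical value for illustration
hive_conf_dir = "/usr/hdp/current/{component_directory}/conf".format(
    component_directory=component_directory)
# hive_conf_dir -> '/usr/hdp/current/hive-server2/conf'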
Example #3
from ambari_commons.constants import AMBARI_SUDO_BINARY
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management import *
from resource_management.core.system import System
from ambari_commons.os_check import OSCheck

config = Script.get_config()
sudo = AMBARI_SUDO_BINARY

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# default hadoop params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

    # not supported in HDP 2.2+
    hadoop_conf_empty_dir = None

versioned_hdp_root = '/usr/hdp/current'

# security params
security_enabled = config['configurations']['cluster-env']['security_enabled']
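
The Script.is_hdp_stack_greater_or_equal("2.2") gate used throughout these scripts amounts to a dotted-version comparison on hdp_stack_version. The sketch below is a simplified comparator, assuming purely numeric dotted versions; compare_versions from resource_management (imported above) covers the same case:

# Illustrative sketch only: a simplified dotted-version comparator in the
# spirit of resource_management's compare_versions (numeric segments only).
def compare_versions(v1, v2):
    t1 = tuple(int(x) for x in v1.split('.'))
    t2 = tuple(int(x) for x in v2.split('.'))
    return (t1 > t2) - (t1 < t2)   # -1, 0 or 1

hdp_stack_version = '2.3.0.0'   # hypothetical formatted stack version
if hdp_stack_version and compare_versions(hdp_stack_version, '2.2') >= 0:
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"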