def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)

    if Script.is_hdp_stack_greater_or_equal('2.3.0.0'):
      conf_select.select(params.stack_name, "hadoop", params.version)
      hdp_select.select("hadoop-hdfs-nfs3", params.version)
Example #2
    def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)

        if Script.is_hdp_stack_greater_or_equal('2.3.0.0'):
            conf_select.select(params.stack_name, "hadoop", params.version)
            hdp_select.select("hadoop-hdfs-nfs3", params.version)
  def pre_rolling_restart(self, env):
    import params
    env.set_params(params)

    if Script.is_hdp_stack_greater_or_equal("2.3"):
      # phoenix uses hbase configs
      conf_select.select(params.stack_name, "hbase", params.version)
      hdp_select.select("phoenix-server", params.version)
Example #4
    def pre_rolling_restart(self, env):
        import params
        env.set_params(params)

        if Script.is_hdp_stack_greater_or_equal("2.3"):
            # phoenix uses hbase configs
            conf_select.select(params.stack_name, "hbase", params.version)
            hdp_select.select("phoenix-server", params.version)
Example #5
  def pre_rolling_restart(self, env):
    Logger.info("Executing Metastore Rolling Upgrade pre-restart")
    import params
    env.set_params(params)

    if Script.is_hdp_stack_greater_or_equal("2.3"):
      self.upgrade_schema(env)

    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "hive", params.version)
      hdp_select.select("hive-metastore", params.version)
Example #6
    def pre_rolling_restart(self, env):
        Logger.info("Executing Metastore Rolling Upgrade pre-restart")
        import params

        env.set_params(params)

        if Script.is_hdp_stack_greater_or_equal("2.3"):
            self.upgrade_schema(env)

        if params.version and compare_versions(format_hdp_stack_version(params.version), "2.2.0.0") >= 0:
            conf_select.select(params.stack_name, "hive", params.version)
            hdp_select.select("hive-metastore", params.version)
Example #7
  def pre_upgrade_restart(self, env, upgrade_type=None):
    Logger.info("Executing Metastore Stack Upgrade pre-restart")
    import params

    env.set_params(params)

    is_stack_hdp_23 = Script.is_hdp_stack_greater_or_equal("2.3")
    is_upgrade = params.upgrade_direction == Direction.UPGRADE

    if is_stack_hdp_23 and is_upgrade:
      self.upgrade_schema(env)

    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "hive", params.version)
      hdp_select.select("hive-metastore", params.version)
Example #8
    def pre_upgrade_restart(self, env, upgrade_type=None):
        Logger.info("Executing Metastore Stack Upgrade pre-restart")
        import params
        env.set_params(params)

        if Script.is_hdp_stack_greater_or_equal("2.3"):
            # ensure that configurations are written out before trying to upgrade the schema
            # since the schematool needs configs and doesn't know how to use the hive conf override
            self.configure(env)
            self.upgrade_schema(env)

        if params.version and compare_versions(
                format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
            conf_select.select(params.stack_name, "hive", params.version)
            hdp_select.select("hive-metastore", params.version)
def link_configs(struct_out_file):
  """
  Links configs, only on a fresh install of HDP-2.3 and higher
  """

  if not Script.is_hdp_stack_greater_or_equal("2.3"):
    Logger.info("Can only link configs for HDP-2.3 and higher.")
    return

  json_version = load_version(struct_out_file)

  if not json_version:
    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
    return

  for k, v in conf_select.PACKAGE_DIRS.iteritems():
    _link_configs(k, json_version, v['conf_dir'], v['current_dir'])
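link_configs() depends on load_version() pulling the installed stack version out of the structured-output file written by the install command. A minimal sketch of such a loader, assuming the file is JSON with a top-level 'version' key (the key name and file shape are assumptions, not taken from the Ambari source shown here):

import json

def load_version(struct_out_file):
    """Return the 'version' recorded in the structured output file, or None if unavailable."""
    try:
        with open(struct_out_file, "r") as fp:
            return json.load(fp).get("version")
    except (IOError, ValueError, AttributeError):
        # file missing, not valid JSON, or not a JSON object
        return None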
def link_configs(struct_out_file):
    """
  """

    if not Script.is_hdp_stack_greater_or_equal("2.3"):
        Logger.info("")
        return

    json_version = load_version(struct_out_file)

    if not json_version:
        Logger.info(
            "Could not load 'version' from {0}".format(struct_out_file))
        return

    for k, v in conf_select.PACKAGE_DIRS.iteritems():
        _link_configs(k, json_version, v)
def link_configs(struct_out_file):
  """
  Links configs, only on a fresh install of HDP-2.3 and higher
  """

  if not Script.is_hdp_stack_greater_or_equal("2.3"):
    Logger.info("Can only link configs for HDP-2.3 and higher.")
    return

  json_version = load_version(struct_out_file)

  if not json_version:
    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
    return

  for k, v in conf_select.PACKAGE_DIRS.iteritems():
    conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
def link_configs(struct_out_file):
    """
  Links configs, only on a fresh install of HDP-2.3 and higher
  """
    import params

    if not Script.is_hdp_stack_greater_or_equal("2.3"):
        Logger.info("Can only link configs for HDP-2.3 and higher.")
        return

    json_version = load_version(struct_out_file)

    if not json_version:
        Logger.info(
            "Could not load 'version' from {0}".format(struct_out_file))
        return

    # On parallel command execution this should be executed by a single process at a time.
    with FcntlBasedProcessLock(params.link_configs_lock_file,
                               enabled=params.is_parallel_execution_enabled,
                               skip_fcntl_failures=True):
        for k, v in conf_select.PACKAGE_DIRS.iteritems():
            conf_select.convert_conf_directories_to_symlinks(
                k, json_version, v)
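The last variant serializes the symlink conversion across concurrently running commands with FcntlBasedProcessLock. A hand-rolled sketch of the same locking idea using fcntl.flock directly (an illustration of the pattern, not Ambari's FcntlBasedProcessLock implementation):

# Illustrative only: advisory file-based lock so that only one process on the
# host runs the protected block at a time.
import fcntl
from contextlib import contextmanager

@contextmanager
def process_lock(lock_file_path, enabled=True):
    if not enabled:
        yield
        return
    with open(lock_file_path, "a") as lock_file:
        fcntl.flock(lock_file, fcntl.LOCK_EX)   # blocks until the lock is free
        try:
            yield
        finally:
            fcntl.flock(lock_file, fcntl.LOCK_UN)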
Example #14
# hadoop default params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"

# during upgrades these directories would resolve to a specific version instead of
# "current", which breaks writing out hadoop-env.sh; force the use of "current"
# in the hook instead
hadoop_home = hdp_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)

hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
hadoop_secure_dn_user = hdfs_user
hadoop_dir = "/etc/hadoop"
versioned_hdp_root = '/usr/hdp/current'

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

  # not supported in HDP 2.2+
  hadoop_conf_empty_dir = None

  if not security_enabled:
    hadoop_secure_dn_user = '******'
  else:
    dfs_dn_port = get_port(dfs_dn_addr)
    dfs_dn_http_port = get_port(dfs_dn_http_addr)
    dfs_dn_https_port = get_port(dfs_dn_https_addr)
    # Avoid being unable to start the DataNode as a non-root user because it
    # would have to bind root-owned (privileged) ports
    if dfs_http_policy == "HTTPS_ONLY":
      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
    elif dfs_http_policy == "HTTP_AND_HTTPS":
      # with both transports enabled, any privileged DataNode port matters
      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
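The port logic above exists because a DataNode can only bind privileged (< 1024) ports when started as root. Minimal sketches of what helpers like get_port() and is_secure_port() plausibly do (these bodies are assumptions for illustration, not the actual Ambari helpers):

# Illustrative only: assumed behavior of the get_port/is_secure_port helpers
# referenced in the snippet above.
def get_port(address):
    """Extract the port from a 'host:port' address string; None if absent."""
    if address is None or ":" not in address:
        return None
    try:
        return int(address.split(":")[-1])
    except ValueError:
        return None

def is_secure_port(port):
    """Privileged ports (< 1024) require root to bind, hence 'secure'."""
    return port is not None and port < 1024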
Example #15
# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY

stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRADE)
version = default("/commandParams/version", None)

storm_component_home_dir = status_params.storm_component_home_dir
conf_dir = status_params.conf_dir

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")

# default hadoop params
rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
storm_bin_dir = "/usr/bin"
storm_lib_dir = "/usr/lib/storm/lib/"

# hadoop parameters for 2.2+
if stack_is_hdp22_or_further:
  rest_lib_dir = format("{storm_component_home_dir}/contrib/storm-rest")
  storm_bin_dir = format("{storm_component_home_dir}/bin")
  storm_lib_dir = format("{storm_component_home_dir}/lib")
  log4j_dir = format("{storm_component_home_dir}/log4j2")

storm_user = config['configurations']['storm-env']['storm_user']
log_dir = config['configurations']['storm-env']['storm_log_dir']
Example #16
sudo = AMBARI_SUDO_BINARY

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# current host stack version
current_version = default("/hostLevelParams/current_version", None)

# default hadoop params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

    # not supported in HDP 2.2+
    hadoop_conf_empty_dir = None

versioned_hdp_root = '/usr/hdp/current'

#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']

#java params
java_home = config['hostLevelParams']['java_home']

#hadoop params
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
Example #17
from resource_management.core.system import System
from ambari_commons.os_check import OSCheck

config = Script.get_config()
sudo = AMBARI_SUDO_BINARY

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# default hadoop params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

    # not supported in HDP 2.2+
    hadoop_conf_empty_dir = None

versioned_hdp_root = '/usr/hdp/current'

#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']

#java params
java_home = config['hostLevelParams']['java_home']

#hadoop params
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
Example #18
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

stack_name = default("/hostLevelParams/stack_name", None)
version = default("/commandParams/version", None)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

xml_configurations_supported = config['configurations']['ranger-env'][
    'xml_configurations_supported']

create_db_dbuser = config['configurations']['ranger-env']['create_db_dbuser']

stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")
stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")

downgrade_from_version = default("/commandParams/downgrade_from_version", None)
upgrade_direction = default("/commandParams/upgrade_direction", None)

ranger_conf = '/etc/ranger/admin/conf'
ranger_ugsync_conf = '/etc/ranger/usersync/conf'

if upgrade_direction == Direction.DOWNGRADE and compare_versions(
        format_hdp_stack_version(version), '2.3') < 0:
    stack_is_hdp22_or_further = True
    stack_is_hdp23_or_further = False

ranger_home = '/usr/lib/ranger-admin'
ranger_conf = '/etc/ranger/admin/conf'
Example #19
from resource_management.core.system import System
from ambari_commons.os_check import OSCheck

config = Script.get_config()
sudo = AMBARI_SUDO_BINARY

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# default hadoop params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

  # not supported in HDP 2.2+
  hadoop_conf_empty_dir = None

versioned_hdp_root = '/usr/hdp/current'

#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']

#java params
java_home = config['hostLevelParams']['java_home']

#hadoop params
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']