Example #1
    def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)

        if Script.is_stack_greater_or_equal("4.2"):
            # phoenix uses hbase configs
            stack_select.select_packages(params.version)
Example #2
  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)

    if Script.is_stack_greater_or_equal('4.1.0.0'):
      conf_select.select(params.stack_name, "hadoop", params.version)
      stack_select.select("hadoop-hdfs-nfs3", params.version)
Example #3
def check_stack_feature(stack_feature, stack_version):
    """
  Given a stack_feature and a specific stack_version, it validates that the feature is supported by the stack_version.
  :param stack_feature: Feature name to check if it is supported by the stack. For example: "rolling_upgrade"
  :param stack_version: Version of the stack
  :return: Will return True if successful, otherwise, False. 
  """
    stack_features_config = default(
        "/configurations/cluster-env/stack_features", None)
    data = _DEFAULT_STACK_FEATURES

    if stack_features_config:
        data = json.loads(stack_features_config)

    for feature in data["stack_features"]:
        if feature["name"] == stack_feature:
            if "min_version" in feature:
                min_version = feature["min_version"]
                if Script.is_stack_less_than(min_version):
                    return False
            if "max_version" in feature:
                max_version = feature["max_version"]
                if Script.is_stack_greater_or_equal(max_version):
                    return False
            return True

    return False
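A minimal usage sketch for the function above. The feature entries are hypothetical; the JSON shape is inferred from the parsing loop, and _DEFAULT_STACK_FEATURES is assumed to hold a dict of the same shape:

# Hypothetical fallback payload mirroring what the loop expects: a
# "stack_features" list of entries with "name" and optional
# "min_version"/"max_version" bounds.
_DEFAULT_STACK_FEATURES = {
    "stack_features": [
        {"name": "rolling_upgrade", "min_version": "4.0.0.0"},
        {"name": "config_versioning", "min_version": "4.1.0.0"}
    ]
}

# Gate an upgrade step on a named feature rather than a hard-coded
# version comparison:
if check_stack_feature("rolling_upgrade", format_stack_version(params.version)):
    stack_select.select_packages(params.version)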
Example #4
    def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)

        if Script.is_stack_greater_or_equal("4.2"):
            # phoenix uses hbase configs
            conf_select.select(params.stack_name, "hbase", params.version)
            stack_select.select("phoenix-server", params.version)
Example #5
    def pre_upgrade_restart(self, env, upgrade_type=None):
        Logger.info("Executing Metastore Rolling Upgrade pre-restart")
        import params
        env.set_params(params)

        if Script.is_stack_greater_or_equal("4.1.0.0"):
            self.upgrade_schema(env)

        if params.version and compare_versions(
                format_stack_version(params.version), '4.0.0.0') >= 0:
            stack_select.select_packages(params.version)
Example #6
    def pre_upgrade_restart(self, env, upgrade_type=None):
        Logger.info("Executing Metastore Stack Upgrade pre-restart")
        import params

        env.set_params(params)

        is_stack_42_or_newer = Script.is_stack_greater_or_equal("4.2.0.0")
        is_upgrade = params.upgrade_direction == Direction.UPGRADE

        if is_stack_42_or_newer and is_upgrade:
            self.upgrade_schema(env)

        if params.version and compare_versions(
                format_stack_version(params.version), '4.0.0.0') >= 0:
            stack_select.select_packages(params.version)
Example #7
def link_configs(struct_out_file):
    """
  Links configs, only on a fresh install of BigInsights-4.1 and higher
  """

    if not Script.is_stack_greater_or_equal("4.1"):
        Logger.info("Can only link configs for BigInsights-4.1 and higher.")
        return

    json_version = load_version(struct_out_file)

    if not json_version:
        Logger.info(
            "Could not load 'version' from {0}".format(struct_out_file))
        return

    for k, v in conf_select.get_package_dirs().iteritems():
        conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
Example #8
def link_configs(struct_out_file):
    """
  Links configs, only on a fresh install of HDP-2.3 and higher
  """

    if not Script.is_stack_greater_or_equal("2.3"):
        Logger.info("Can only link configs for HDP-2.3 and higher.")
        return

    json_version = load_version(struct_out_file)

    if not json_version:
        Logger.info(
            "Could not load 'version' from {0}".format(struct_out_file))
        return

    for k, v in conf_select.PACKAGE_DIRS.iteritems():
        conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
Example #9
def link_configs(struct_out_file):
  """
  Links configs, only on a fresh install of HDF-0.3 and higher
  """
  import params

  if not Script.is_stack_greater_or_equal("0.3"):
    Logger.info("Can only link configs for HDF-0.3 and higher.")
    return

  json_version = load_version(struct_out_file)

  if not json_version:
    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
    return

  # On parallel command execution this should be executed by a single process at a time.
  with FcntlBasedProcessLock(params.link_configs_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
    for k, v in conf_select.get_package_dirs().iteritems():
      conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
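A hypothetical call site for the variant above; the structured-output path is illustrative, and load_version() is expected to read the stack "version" field from that file:

# Illustrative path only; the real file is produced by the install
# command and passed in by the calling script.
link_configs("/var/lib/ambari-agent/data/structured-out.json")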
Example #10
# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY

stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction",
                            Direction.UPGRADE)
version = default("/commandParams/version", None)

storm_component_home_dir = status_params.storm_component_home_dir
conf_dir = status_params.conf_dir

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
stack_version_formatted = format_stack_version(stack_version_unformatted)
stack_is_hdp22_or_further = Script.is_stack_greater_or_equal("2.2")

# default Storm paths
rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
storm_bin_dir = "/usr/bin"
storm_lib_dir = "/usr/lib/storm/lib/"

# Storm paths for HDP 2.2+
if stack_is_hdp22_or_further:
    rest_lib_dir = format("{storm_component_home_dir}/contrib/storm-rest")
    storm_bin_dir = format("{storm_component_home_dir}/bin")
    storm_lib_dir = format("{storm_component_home_dir}/lib")
    log4j_dir = format("{storm_component_home_dir}/log4j2")

storm_user = config['configurations']['storm-env']['storm_user']
log_dir = config['configurations']['storm-env']['storm_log_dir']
Example #11
hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec",
                                                 force_latest_on_upgrade=True)

hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
hadoop_secure_dn_user = hdfs_user
hadoop_dir = "/etc/hadoop"
versioned_stack_root = '/usr/hdp/current'
hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
datanode_max_locked_memory = config['configurations']['hdfs-site'][
    'dfs.datanode.max.locked.memory']
is_datanode_max_locked_memory_set = not is_empty(
    config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])

# HDP 2.2+ params
if Script.is_stack_greater_or_equal("2.2"):
    mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

    # not supported in HDP 2.2+
    hadoop_conf_empty_dir = None

    if not security_enabled:
        hadoop_secure_dn_user = '******'
    else:
        dfs_dn_port = get_port(dfs_dn_addr)
        dfs_dn_http_port = get_port(dfs_dn_http_addr)
        dfs_dn_https_port = get_port(dfs_dn_https_addr)
        # Avoid a DataNode start failure when running as a plain user on root-owned (privileged) ports
        if dfs_http_policy == "HTTPS_ONLY":
            secure_dn_ports_are_in_use = is_secure_port(
                dfs_dn_port) or is_secure_port(dfs_dn_https_port)
Example #12
    def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)

        if Script.is_stack_greater_or_equal('4.1.0.0'):
            stack_select.select_packages(params.version)
Example #13
# force the use of "current" in the hook
hadoop_home = stack_select.get_hadoop_dir("home")
hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")

hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
hadoop_secure_dn_user = hdfs_user
hadoop_dir = "/etc/hadoop"
versioned_stack_root = '/usr/iop/current'
hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
datanode_max_locked_memory = config['configurations']['hdfs-site'][
    'dfs.datanode.max.locked.memory']
is_datanode_max_locked_memory_set = not is_empty(
    config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])

# IOP 4.0+ params
if Script.is_stack_greater_or_equal("4.0"):
    mapreduce_libs_path = "/usr/iop/current/hadoop-mapreduce-client/*"

    # not supported in IOP 4.0+
    hadoop_conf_empty_dir = None

    if not security_enabled:
        hadoop_secure_dn_user = '******'
    else:
        dfs_dn_port = get_port(dfs_dn_addr)
        dfs_dn_http_port = get_port(dfs_dn_http_addr)
        dfs_dn_https_port = get_port(dfs_dn_https_addr)
        # Avoid a DataNode start failure when running as a plain user on root-owned (privileged) ports
        if dfs_http_policy == "HTTPS_ONLY":
            secure_dn_ports_are_in_use = is_secure_port(
                dfs_dn_port) or is_secure_port(dfs_dn_https_port)
Example #14
def execute(configurations={}, parameters={}, host_name=None):
    """
  Returns a tuple containing the result code and a pre-formatted result label

  Keyword arguments:
  configurations (dictionary): a mapping of configuration key to value
  parameters (dictionary): a mapping of script parameter key to value
  host_name (string): the name of this host where the alert is running
  """

    if configurations is None:
        return ('UNKNOWN',
                ['There were no configurations supplied to the script.'])

    ranger_link = None
    ranger_auth_link = None
    ranger_get_user = None
    admin_username = None
    admin_password = None
    ranger_admin_username = None
    ranger_admin_password = None
    security_enabled = False

    stack_is_hdp25_or_further = Script.is_stack_greater_or_equal("2.5")

    if RANGER_ADMIN_URL in configurations:
        ranger_link = configurations[RANGER_ADMIN_URL]
        if ranger_link.endswith('/'):
            ranger_link = ranger_link[:-1]
        ranger_auth_link = '{0}/{1}'.format(
            ranger_link, 'service/public/api/repository/count')
        ranger_get_user = '{0}/{1}'.format(ranger_link, 'service/xusers/users')

    if ADMIN_USERNAME in configurations:
        admin_username = configurations[ADMIN_USERNAME]

    if ADMIN_PASSWORD in configurations:
        admin_password = configurations[ADMIN_PASSWORD]

    if RANGER_ADMIN_USERNAME in configurations:
        ranger_admin_username = configurations[RANGER_ADMIN_USERNAME]

    if RANGER_ADMIN_PASSWORD in configurations:
        ranger_admin_password = configurations[RANGER_ADMIN_PASSWORD]

    if SECURITY_ENABLED in configurations:
        security_enabled = str(
            configurations[SECURITY_ENABLED]).upper() == 'TRUE'

    label = None
    result_code = 'OK'

    try:
        if security_enabled and stack_is_hdp25_or_further:
            result_code = 'UNKNOWN'
            label = 'This alert will get skipped for Ranger Admin on kerberos env'
        else:
            admin_http_code = check_ranger_login(ranger_auth_link,
                                                 admin_username,
                                                 admin_password)
            if admin_http_code == 200:
                get_user_code = get_ranger_user(ranger_get_user,
                                                admin_username, admin_password,
                                                ranger_admin_username)
                if get_user_code:
                    user_http_code = check_ranger_login(
                        ranger_auth_link, ranger_admin_username,
                        ranger_admin_password)
                    if user_http_code == 200:
                        result_code = 'OK'
                        label = 'Login Successful for users {0} and {1}'.format(
                            admin_username, ranger_admin_username)
                    elif user_http_code == 401:
                        result_code = 'CRITICAL'
                        label = 'User:{0} credentials on Ambari UI are not in sync with Ranger'.format(
                            ranger_admin_username)
                    else:
                        result_code = 'WARNING'
                        label = 'Ranger Admin service is not reachable, please restart the service'
                else:
                    result_code = 'OK'
                    label = 'Login Successful for user: {0}. User:{1} user not yet synced with Ranger'.format(
                        admin_username, ranger_admin_username)
            elif admin_http_code == 401:
                result_code = 'CRITICAL'
                label = 'User:{0} credentials on Ambari UI are not in sync with Ranger'.format(
                    admin_username)
            else:
                result_code = 'WARNING'
                label = 'Ranger Admin service is not reachable, please restart the service'

    except Exception as e:
        label = str(e)
        result_code = 'UNKNOWN'
        logger.exception(label)

    return (result_code, [label])
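A minimal invocation sketch, assuming the module-level constants referenced above (RANGER_ADMIN_URL, ADMIN_USERNAME, etc.) resolve to the alert's configuration keys; the host and credential values are placeholders:

# In production the Ambari alert framework supplies this dictionary;
# here the keys reuse the constants referenced by execute() itself.
configs = {
    RANGER_ADMIN_URL: 'http://ranger-host:6080',
    ADMIN_USERNAME: 'admin',
    ADMIN_PASSWORD: 'admin-password',
    RANGER_ADMIN_USERNAME: 'ranger_admin',
    RANGER_ADMIN_PASSWORD: 'ranger-password',
    SECURITY_ENABLED: 'false',
}

result_code, messages = execute(configurations=configs)
print('{0}: {1}'.format(result_code, messages[0]))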