def pre_rolling_restart(self, env):
  """Switch Falcon Server onto the target stack version before a restart.

  Selects the versioned configuration and binary symlinks for the upgrade
  target, then restores previously saved Falcon state. No-op when the stack
  version cannot be determined or is below HDP 2.2.
  """
  import params
  env.set_params(params)

  # Rolling upgrade is only supported from HDP 2.2 onwards; bail out early
  # when the stack is older (or the version is indeterminate).
  if Script.is_hdp_stack_less_than("2.2"):
    return

  Logger.info("Executing Falcon Server Rolling Upgrade pre-restart")
  conf_select.select(params.stack_name, "falcon", params.version)
  hdp_select.select("falcon-server", params.version)
  falcon_server_upgrade.pre_start_restore()
def pre_rolling_restart(self, env):
  """Prepare the Falcon Server for a rolling-upgrade restart.

  Points the "falcon" conf and "falcon-server" binary selections at the
  upgrade target version and restores saved state. Does nothing for stacks
  older than HDP 2.2 (or when the version cannot be determined).
  """
  import params
  env.set_params(params)

  # Pre-restart work applies only to HDP >= 2.2; earlier stacks do not
  # support rolling upgrade.
  if not Script.is_hdp_stack_less_than("2.2"):
    Logger.info("Executing Falcon Server Rolling Upgrade pre-restart")
    conf_select.select(params.stack_name, "falcon", params.version)
    hdp_select.select("falcon-server", params.version)
    falcon_server_upgrade.pre_start_restore()
#security params security_enabled = config['configurations']['cluster-env']['security_enabled'] #java params java_home = config['hostLevelParams']['java_home'] #hadoop params hdfs_log_dir_prefix = config['configurations']['hadoop-env'][ 'hdfs_log_dir_prefix'] hadoop_pid_dir_prefix = config['configurations']['hadoop-env'][ 'hadoop_pid_dir_prefix'] hadoop_root_logger = config['configurations']['hadoop-env'][ 'hadoop_root_logger'] if Script.is_hdp_stack_greater_or_equal( "2.0") and Script.is_hdp_stack_less_than( "2.1") and not OSCheck.is_suse_family(): # deprecated rhel jsvc_path jsvc_path = "/usr/libexec/bigtop-utils" else: jsvc_path = "/usr/lib/bigtop-utils" hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize'] namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize'] namenode_opt_newsize = config['configurations']['hadoop-env'][ 'namenode_opt_newsize'] namenode_opt_maxnewsize = config['configurations']['hadoop-env'][ 'namenode_opt_maxnewsize'] namenode_opt_permsize = format_jvm_option( "/configurations/hadoop-env/namenode_opt_permsize", "128m") namenode_opt_maxpermsize = format_jvm_option( "/configurations/hadoop-env/namenode_opt_maxpermsize", "256m")
# NOTE(review): initialized to None here; presumably assigned/consumed
# elsewhere in this params module — confirm against the full file.
hadoop_conf_empty_dir = None
# Root of the version-selected HDP layout (symlinks to the active version).
versioned_hdp_root = '/usr/hdp/current'

# security params
# True when Kerberos security is enabled for the cluster (cluster-env).
security_enabled = config['configurations']['cluster-env']['security_enabled']

# java params
# JDK installation root pushed down from the Ambari server.
java_home = config['hostLevelParams']['java_home']

# hadoop params
# Base directories for Hadoop daemon logs and PID files, and the default
# root logger setting, all sourced from the hadoop-env config type.
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']

# jsvc lives under /usr/libexec only on non-SUSE HDP 2.0.x installs;
# everywhere else the /usr/lib path is used.
if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
  # deprecated rhel jsvc_path
  jsvc_path = "/usr/libexec/bigtop-utils"
else:
  jsvc_path = "/usr/lib/bigtop-utils"

# JVM heap sizing for Hadoop daemons, taken verbatim from hadoop-env.
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
# PermGen options go through format_jvm_option so a missing config key
# falls back to the supplied default ("128m"/"256m").
namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")

# Fixed JobTracker JVM sizing (not exposed through hadoop-env).
jtnode_opt_newsize = "200m"
jtnode_opt_maxnewsize = "200m"
jtnode_heapsize = "1024m"