def get_lzo_packages():
  """Return the list of LZO package names appropriate for this host's OS family."""
  packages = []
  script = Script.get_instance()
  # SLES 12+ ships lzo as liblzo2-2; older SUSE and RHEL use the plain "lzo" package.
  if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12:
    packages.append("liblzo2-2")
  elif OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    packages.append("lzo")
  elif OSCheck.is_ubuntu_family():
    packages.append("liblzo2-2")

  # only used to check stack_feature, NOT as package version!
  stack_version_unformatted = stack_features.get_stack_feature_version(Script.get_config())
  if stack_version_unformatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_unformatted):
    # Debian-based packages use dashes; everything else uses underscores.
    if OSCheck.is_ubuntu_family():
      versioned = ("hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native")
    else:
      versioned = ("hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native")
    packages.extend(script.format_package_name(name) for name in versioned)
  return packages
def actionexecute(self, env): num_errors = 0 # Parse parameters config = Script.get_config() try: command_repository = CommandRepository(config['repositoryFile']) except KeyError: raise Fail( "The command repository indicated by 'repositoryFile' was not found" ) repo_rhel_suse = config['configurations']['cluster-env'][ 'repo_suse_rhel_template'] repo_ubuntu = config['configurations']['cluster-env'][ 'repo_ubuntu_template'] template = repo_rhel_suse if OSCheck.is_redhat_family( ) or OSCheck.is_suse_family() else repo_ubuntu # Handle a SIGTERM and SIGINT gracefully signal.signal(signal.SIGTERM, self.abort_handler) signal.signal(signal.SIGINT, self.abort_handler) self.repository_version = command_repository.version_string # Select dict that contains parameters try: package_list = json.loads(config['roleParams']['package_list']) stack_id = config['roleParams']['stack_id'] except KeyError: pass self.stack_name = Script.get_stack_name() if self.stack_name is None: raise Fail("Cannot determine the stack name") self.stack_root_folder = Script.get_stack_root() if self.stack_root_folder is None: raise Fail("Cannot determine the stack's root directory") if self.repository_version is None: raise Fail("Cannot determine the repository version to install") self.repository_version = self.repository_version.strip() try: if 0 == len(command_repository.repositories): Logger.warning( "Repository list is empty. Ambari may not be managing the repositories for {0}." .format(self.repository_version)) else: Logger.info( "Will install packages for repository version {0}".format( self.repository_version)) create_repo_files(template, command_repository) except Exception, err: Logger.logger.exception( "Cannot install repository files. Error: {0}".format(str(err))) num_errors += 1
def install_repository(self, url_info, repository_version, append_to_file):
  """Write one repository definition to a repo file; return (repo_name, file_name)."""
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    template = "repo_suse_rhel.j2"
  else:
    template = "repo_ubuntu.j2"
  repo_name = "{0}-{1}".format(url_info['name'], repository_version)
  # Missing keys simply yield None (same as the explicit membership checks).
  base_url = url_info['baseUrl'] if 'baseUrl' in url_info else None
  mirrors_list = url_info['mirrorsList'] if 'mirrorsList' in url_info else None
  ubuntu_components = [url_info['name']] + self.UBUNTU_REPO_COMPONENTS_POSTFIX
  file_name = self.REPO_FILE_NAME_PREFIX + repository_version
  Repository(repo_name,
             action="create",
             base_url=base_url,
             mirror_list=mirrors_list,
             repo_file_name=file_name,
             repo_template=template,
             append_to_file=append_to_file,
             components=ubuntu_components,  # ubuntu specific
  )
  return repo_name, file_name
def list_ambari_managed_repos(stack_name):
  """
  Lists all repositories that are present at host.

  :param stack_name: stack name (e.g. HDP); matched case-insensitively against repo file names
  :return: unique repository file base names (no extension), in discovery order
  :raises Fail: if the OS family is not recognized
  """
  stack_name = stack_name.upper()
  # TODO : get it dynamically from the server
  repository_names = [stack_name, stack_name + "-UTILS"]
  if OSCheck.is_ubuntu_family():
    repo_dir = '/etc/apt/sources.list.d/'
  elif OSCheck.is_redhat_family():
    # Centos/RHEL 5/6
    repo_dir = '/etc/yum.repos.d/'
  elif OSCheck.is_suse_family():
    repo_dir = '/etc/zypp/repos.d/'
  else:
    # fixed typo: message previously read "Can not dermine repo dir"
    raise Fail('Cannot determine repo dir')
  repos = []
  for name in repository_names:
    # List all files that match pattern
    files = glob.glob(os.path.join(repo_dir, name) + '*')
    for f in files:
      filename = os.path.basename(f)
      # leave out extension
      reponame = os.path.splitext(filename)[0]
      repos.append(reponame)
  # de-duplicate while preserving first-seen order
  seen = set()
  uniq = [s for s in repos if not (s in seen or seen.add(s))]
  return uniq
def list_ambari_managed_repos(stack_name):
  """
  Lists all repositories that are present at host
  """
  stack_name = stack_name.upper()
  # TODO : get it dynamically from the server
  if OSCheck.is_ubuntu_family():
    repo_dir = '/etc/apt/sources.list.d/'
  elif OSCheck.is_redhat_family():
    # Centos/RHEL 5/6
    repo_dir = '/etc/yum.repos.d/'
  elif OSCheck.is_suse_family():
    repo_dir = '/etc/zypp/repos.d/'
  else:
    raise Fail('Can not dermine repo dir')
  collected = []
  for prefix in (stack_name, stack_name + "-UTILS"):
    # pick up every repo file whose name starts with the prefix
    for match in glob.glob(os.path.join(repo_dir, prefix) + '*'):
      # strip directory and extension, keep the bare repo name
      collected.append(os.path.splitext(os.path.basename(match))[0])
  # keep only the first occurrence of each name
  seen = set()
  unique_names = []
  for name in collected:
    if name not in seen:
      seen.add(name)
      unique_names.append(name)
  return unique_names
def actionexecute(self, env):
  """
  Re-create repo files from the command's 'repositoryFile' descriptor.

  Iterates over the repositories in config['repositoryFile'], writing each one
  with the template matching the host OS family. Any failure aborts with Fail.
  """
  config = Script.get_config()
  structured_output = {}
  try:
    repo_info = config['repositoryFile']
    for item in repo_info["repositories"]:
      base_url = item["baseUrl"]
      repo_name = item["repoName"]
      repo_id = item["repoId"]
      distribution = item["distribution"] if "distribution" in item else None
      components = item["components"] if "components" in item else None
      repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
      repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
      template = repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else repo_ubuntu
      # apt wants "<distribution> <component...>"; fall back to the repo name
      ubuntu_components = [distribution if distribution else repo_name] + \
                          [components.replace(",", " ") if components else self.UBUNTU_REPO_COMPONENTS_POSTFIX]
      Repository(repo_id,
                 action="create",
                 base_url=base_url,
                 mirror_list=None,
                 repo_file_name=repo_name,
                 repo_template=template,
                 components=ubuntu_components,  # ubuntu specific
      )
      structured_output["repo_update"] = {"exit_code": 0, "message": format("Repository files successfully updated!")}
  # was "except Exception, exception" — modernized to the Py2.6+/Py3 form
  except Exception as exception:
    Logger.logger.exception("ERROR: There was an unexpected error while updating repositories")
    raise Fail("Failed to update repo files!")
def install_repository(self, url_info, repository_version, append_to_file):
  """Create a single repo file entry and return the (repo name, file name) pair."""
  on_rhel_or_suse = OSCheck.is_redhat_family() or OSCheck.is_suse_family()
  template = "repo_suse_rhel.j2" if on_rhel_or_suse else "repo_ubuntu.j2"
  repo = {
    'repoName': "{0}-{1}".format(url_info['name'], repository_version),
    # .get() yields None for missing keys, matching the original membership checks
    'baseurl': url_info.get('baseUrl'),
    'mirrorsList': url_info.get('mirrorsList'),
  }
  ubuntu_components = [url_info['name']] + self.UBUNTU_REPO_COMPONENTS_POSTFIX
  file_name = self.REPO_FILE_NAME_PREFIX + repository_version
  Repository(repo['repoName'],
             action="create",
             base_url=repo['baseurl'],
             mirror_list=repo['mirrorsList'],
             repo_file_name=file_name,
             repo_template=template,
             append_to_file=append_to_file,
             components=ubuntu_components,  # ubuntu specific
  )
  return repo['repoName'], file_name
def actionexecute(self, env):
  """
  Re-create repo files from the JSON blob in hostLevelParams/repo_info.

  Each repository entry is written with the template for the host OS family.
  Any failure is logged and converted into a Fail.
  """
  config = Script.get_config()
  structured_output = {}
  try:
    repo_info_json = config['hostLevelParams']['repo_info']
    repo_info_dict = json.loads(repo_info_json)
    for item in repo_info_dict["repositories"]:
      base_url = item["base_url"]
      repo_name = item["repo_name"]
      repo_id = item["repo_id"]
      repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
      repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
      template = repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else repo_ubuntu
      ubuntu_components = [repo_name] + self.UBUNTU_REPO_COMPONENTS_POSTFIX
      Repository(repo_id,
                 action="create",
                 base_url=base_url,
                 mirror_list=None,
                 repo_file_name=repo_name,
                 repo_template=template,
                 components=ubuntu_components,  # ubuntu specific
      )
      structured_output["repo_update"] = {"exit_code": 0, "message": format("Repository files successfully updated!")}
  # was "except Exception, exception" — modernized to the Py2.6+/Py3 form
  except Exception as exception:
    Logger.logger.exception("ERROR: There was an unexpected error while updating repositories")
    raise Fail("Failed to update repo files!")
def install_repos():
  """Create the cluster (and optional service) package repositories."""
  import params
  # Pick the template matching this host's package manager family.
  if OSCheck.is_suse_family() or OSCheck.is_redhat_family():
    template = "repo_suse_rhel.j2"
  else:
    template = "repo_ubuntu.j2"
  _alter_repo("create", params.repo_info, template)
  if params.service_repo_info:
    _alter_repo("create", params.service_repo_info, template)
def server_files():
  """
  Lay down Ganglia server support files: the rrd.py CGI script, the rrdcached
  base directory, and (on SUSE/Ubuntu) the apache ganglia config file.
  """
  import params

  rrd_py_path = params.rrd_py_path
  Directory(rrd_py_path,
            recursive=True)
  rrd_py_file_path = path.join(rrd_py_path, "rrd.py")
  # 0o755 / 0o644: explicit octal literals (the old 0755 form is Py2-only)
  TemplateConfig(rrd_py_file_path,
                 owner="root",
                 group="root",
                 mode=0o755)
  rrd_file_owner = params.gmetad_user
  Directory(params.rrdcached_base_dir,
            owner=rrd_file_owner,
            group=rrd_file_owner,
            mode=0o755,
            recursive=True)
  # SUSE/Ubuntu apache reads its ganglia config from a dedicated file
  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    File(params.ganglia_apache_config_file,
         content=Template("ganglia.conf.j2"),
         mode=0o644)
def get_lzo_packages(stack_version_unformatted):
  """Return LZO package names for this OS family and stack version."""
  script_instance = Script.get_instance()
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    packages = ["lzo", "hadoop-lzo-native"]
  elif OSCheck.is_ubuntu_family():
    packages = ["liblzo2-2"]
  else:
    packages = []
  if stack_version_unformatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_unformatted):
    # Debian-style names use dashes; RPM-style names use underscores.
    if OSCheck.is_ubuntu_family():
      stack_package_names = ["hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native"]
    else:
      stack_package_names = ["hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native"]
    packages += [script_instance.format_package_name(name) for name in stack_package_names]
  else:
    packages.append("hadoop-lzo")
  return packages
def get_serivice_params(self):
  """
  Detect the Apache httpd service name and configuration directory for this
  OS family. NOTE(review): the method name typo ("serivice") is part of the
  public interface and therefore kept.
  """
  self.system = System.get_instance()
  # Debian- and SUSE-based systems package httpd as "apache2".
  uses_apache2 = OSCheck.is_suse_family() or OSCheck.is_ubuntu_family()
  self.service_name = "apache2" if uses_apache2 else "httpd"
  self.httpd_conf_dir = '/etc/apache2' if uses_apache2 else '/etc/httpd/conf'
def install_repos():
  """Create package repositories unless the host was sys-prepped."""
  import params
  if params.host_sys_prepped:
    # sys-prepped hosts manage their own repositories
    return
  if OSCheck.is_suse_family() or OSCheck.is_redhat_family():
    template = params.repo_rhel_suse
  else:
    template = params.repo_ubuntu
  _alter_repo("create", params.repo_info, template)
  if params.service_repo_info:
    _alter_repo("create", params.service_repo_info, template)
def stop(self, env):
  """Stop the KDC and kadmin daemons with the OS-appropriate service commands."""
  if OSCheck.is_suse_family():
    commands = ('rckadmind stop', 'rckrb5kdc stop')
  elif OSCheck.is_ubuntu_family():
    commands = ('service krb5-kdc stop', 'service krb5-admin-server stop')
  else:
    commands = ('service krb5kdc stop', 'service kadmin stop')
  for command in commands:
    Execute(command)
def getRepoDir(self):
  """Return the package-repository directory for this host's OS family."""
  if OSCheck.is_redhat_family():
    return "/etc/yum.repos.d"
  if OSCheck.is_suse_family():
    return "/etc/zypp/repos.d"
  if OSCheck.is_ubuntu_family():
    return "/etc/apt/sources.list.d"
  raise Exception("Unsupported OS family '{0}'".format(OSCheck.get_os_family()))
def get_lzo_packages(stack_version_unformatted):
  """Return the LZO packages to install for the given (unformatted) stack version."""
  lzo_packages = ["hadoop-lzo"]
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    lzo_packages.extend(["lzo", "hadoop-lzo-native"])
  elif OSCheck.is_ubuntu_family():
    lzo_packages.append("liblzo2-2")

  # NOTE: format() resolves these local names inside the patterns below,
  # so they must keep exactly these names.
  underscored_version = stack_version_unformatted.replace('.', '_')
  dashed_version = stack_version_unformatted.replace('.', '-')

  hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
  is_22_or_later = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
  if is_22_or_later:
    if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
      lzo_packages.append(format("hadooplzo_{underscored_version}_*"))
    elif OSCheck.is_ubuntu_family():
      lzo_packages.append(format("hadooplzo_{dashed_version}_*"))
  return lzo_packages
def get_lzo_packages(stack_version_unformatted):
  """Return LZO package names for this OS family / stack version combination."""
  script_instance = Script.get_instance()
  # SLES 12+ renamed the lzo runtime to liblzo2-2.
  if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12:
    lzo_packages = ["liblzo2-2", "hadoop-lzo-native"]
  elif OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    lzo_packages = ["lzo", "hadoop-lzo-native"]
  elif OSCheck.is_ubuntu_family():
    lzo_packages = ["liblzo2-2"]
  else:
    lzo_packages = []
  if stack_version_unformatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_unformatted):
    if OSCheck.is_ubuntu_family():
      versioned = ("hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native")
    else:
      versioned = ("hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native")
    lzo_packages += [script_instance.format_package_name(name) for name in versioned]
  else:
    lzo_packages.append("hadoop-lzo")
  return lzo_packages
def get_base_packages_to_install(self):
  """
  HACK: list packages which should be installed without disabling any repos.
  (This is planned to fix in Ambari-2.2)
  """
  # SUSE/Ubuntu ship the fuse runtime library as libfuse2; RHEL as fuse-libs.
  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    fuse_library = 'libfuse2'
  else:
    fuse_library = 'fuse-libs'
  return ['fuse', fuse_library]
def install_repos():
  """
  Write the Cassandra repo file for RHEL/SUSE hosts.

  :raises Exception: on OS families with no configured repo file path
      (Ubuntu support was sketched but never finished; the dead commented-out
      draft has been removed — see VCS history).
  """
  import params_repo
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    with open(params_repo.repo_rhel_suse_path, "w") as f:
      f.writelines(params_repo.repo_rhel_suse_content)
  else:
    raise Exception('Cassandra repo file path not set for current OS.')
def get_clearcache_cmd(self):
  """Return the argv tuple that flushes the package-manager cache for this OS."""
  if OSCheck.is_redhat_family():
    Logger.info("Clear repository cache for the RedHat OS family")
    return ("/usr/bin/yum", "clean", "all")
  if OSCheck.is_suse_family():
    Logger.info("Clear repository cache for the SUSE OS family")
    return ('/usr/bin/zypper', 'refresh')
  if OSCheck.is_ubuntu_family():
    Logger.info("Clear repository cache for the Ubuntu OS family")
    return ('/usr/bin/apt-get', 'update')
  raise Exception("Unsupported OS family: '{0}' ".format(OSCheck.get_os_family()))
def format_package_name(self, package_name, repo_id):
  """
  This method overcomes problems at SLES SP3. Zypper here behaves differently
  than at SP1, and refuses to install packages by mask if there is any
  installed package that matches this mask. So we preppend concrete HDP
  version to mask under Suse
  """
  # Only SUSE wildcard masks need rewriting; everything else passes through.
  if not (OSCheck.is_suse_family() and '*' in package_name):
    return package_name
  mask_version = re.search(r'((_\d+)*(_)?\*)', package_name).group(0)
  formatted_version = '_' + repo_id.replace('.', '_').replace('-', '_') + '*'
  return package_name.replace(mask_version, formatted_version)
def __init__(self, config, tags_to_skip):
  """Capture the repo template and command repository from the command config."""
  self.tags_to_skip = tags_to_skip
  # repo templates
  repo_file = config['repositoryFile']
  cluster_env = config['configurations']['cluster-env']
  repo_rhel_suse = cluster_env['repo_suse_rhel_template']
  repo_ubuntu = cluster_env['repo_ubuntu_template']
  if is_empty(repo_file):
    # No repository info in this command; template/command_repository stay unset.
    return
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    self.template = repo_rhel_suse
  else:
    self.template = repo_ubuntu
  self.command_repository = CommandRepository(repo_file)
def _clear_package_manager_cache(self):
  """Drop cached package-manager metadata so fresh repo data is fetched."""
  package_manager_cmd = ""
  if OSCheck.is_redhat_family():
    package_manager_cmd = ("/usr/bin/yum", "clean", "metadata")
  elif OSCheck.is_suse_family():
    package_manager_cmd = ("/usr/bin/zypper", "-q", "-n", "clean")
  elif OSCheck.is_ubuntu_family():
    # apt refreshes metadata on demand; nothing to clear here
    return
  Logger.debug("Clearing repo manager metadata")
  Execute(package_manager_cmd, logoutput=False, sudo=True)
def install_repos():
  """Create repo files for the cluster, preferring the repositoryFile descriptor."""
  import params
  if params.host_sys_prepped:
    # sys-prepped hosts manage their own repositories
    return
  on_rhel_or_suse = OSCheck.is_suse_family() or OSCheck.is_redhat_family()
  template = params.repo_rhel_suse if on_rhel_or_suse else params.repo_ubuntu
  # use this newer way of specifying repositories, if available
  if params.repo_file is not None:
    create_repo_files(template, CommandRepository(params.repo_file))
    return
  _alter_repo("create", params.repo_info, template)
  if params.service_repo_info:
    _alter_repo("create", params.service_repo_info, template)
def get_lzo_packages(stack_version_unformatted):
  """Return LZO package names for the host OS family and stack version."""
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    lzo_packages = ["lzo", "hadoop-lzo-native"]
  elif OSCheck.is_ubuntu_family():
    lzo_packages = ["liblzo2-2"]
  else:
    lzo_packages = []
  # Rolling-upgrade stacks ship versioned hadooplzo packages; older stacks
  # use the legacy hadoop-lzo package.
  supports_rolling = stack_version_unformatted and check_stack_feature(
    StackFeature.ROLLING_UPGRADE, stack_version_unformatted)
  lzo_packages.append("hadooplzo_*" if supports_rolling else "hadoop-lzo")
  return lzo_packages
def get_lzo_packages():
  """Return LZO package names for this host, including versioned hadooplzo packages."""
  script_instance = Script.get_instance()
  # SLES 12+ renamed the lzo runtime to liblzo2-2.
  if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12:
    lzo_packages = ["liblzo2-2"]
  elif OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    lzo_packages = ["lzo"]
  elif OSCheck.is_ubuntu_family():
    lzo_packages = ["liblzo2-2"]
  else:
    lzo_packages = []
  if OSCheck.is_ubuntu_family():
    versioned = ("hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native")
  else:
    versioned = ("hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native")
  lzo_packages += [script_instance.format_package_name(name) for name in versioned]
  return lzo_packages
def start(self, env):
  """Reconfigure, ensure the admin identity exists, then start KDC + kadmin."""
  # Attempt to reconfigure the service before starting
  self.configure(env)
  # Create or update the administrator account
  KerberosScript.create_or_update_administrator_identity()
  if OSCheck.is_suse_family():
    start_commands = ('rckadmind start', 'rckrb5kdc start')
  elif OSCheck.is_ubuntu_family():
    start_commands = ('service krb5-kdc start', 'service krb5-admin-server start')
  else:
    start_commands = ('service krb5kdc start', 'service kadmin start')
  for command in start_commands:
    Execute(command)
def status(self, env):
  """Raise ComponentIsNotRunning when the KDC or kadmin daemon is down."""
  import params
  if OSCheck.is_suse_family():
    # SUSE has no pid files for these daemons; probe the processes directly.
    try:
      Execute('checkproc `which krb5kdc`')
      Execute('checkproc `which kadmind`')
    except Fail as ex:
      raise ComponentIsNotRunning()
  else:
    # Ubuntu and all other families track the daemons via pid files
    # (the two branches were identical in the original).
    check_process_status(params.kdamin_pid_path)
    check_process_status(params.krb5kdc_pid_path)
def __init__(self, config):
  """
  Constructor for RepositoryUtil
  :type config dict
  """
  # repo templates
  repo_file = config['repositoryFile']
  cluster_env = config['configurations']['cluster-env']
  repo_rhel_suse = cluster_env['repo_suse_rhel_template']
  repo_ubuntu = cluster_env['repo_ubuntu_template']
  if is_empty(repo_file):
    # nothing to manage; leave template/command_repository unset
    return
  use_rhel_suse = OSCheck.is_redhat_family() or OSCheck.is_suse_family()
  self.template = repo_rhel_suse if use_rhel_suse else repo_ubuntu
  self.command_repository = CommandRepository(repo_file)
def get_lzo_packages(stack_version_unformatted):
  """
  Return the LZO packages to install for the host OS family.

  For stack 2.2+ the versioned wildcard package 'hadooplzo_*' is added,
  otherwise the legacy 'hadoop-lzo' package.

  (Removed the unused underscored_version/dashed_version locals — no format()
  call in this function referenced them.)
  """
  lzo_packages = []
  if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
    lzo_packages += ["lzo", "hadoop-lzo-native"]
  elif OSCheck.is_ubuntu_family():
    lzo_packages += ["liblzo2-2"]

  stack_version_formatted = format_stack_version(stack_version_unformatted)
  if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0:
    lzo_packages += ["hadooplzo_*"]
  else:
    lzo_packages += ["hadoop-lzo"]
  return lzo_packages
def server_files():
  """
  Deploy Ganglia server support files: the rrd.py CGI script, the rrdcached
  base directory, and (on SUSE/Ubuntu) the apache ganglia config file.
  """
  import params

  rrd_py_path = params.rrd_py_path
  Directory(rrd_py_path, create_parents=True)
  rrd_py_file_path = path.join(rrd_py_path, "rrd.py")
  # 0o755 / 0o644: explicit octal literals (the old 0755 form is Py2-only)
  TemplateConfig(rrd_py_file_path, owner="root", group="root", mode=0o755)
  rrd_file_owner = params.gmetad_user
  Directory(params.rrdcached_base_dir,
            owner=rrd_file_owner,
            group=rrd_file_owner,
            mode=0o755,
            create_parents=True)
  # SUSE/Ubuntu apache reads its ganglia config from a dedicated file
  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    File(params.ganglia_apache_config_file,
         content=Template("ganglia.conf.j2"),
         mode=0o644)
def actionexecute(self, env):
  """
  Re-create repo files from the JSON blob in hostLevelParams/repo_info.

  Each repository entry is written with the template for the host OS family.
  Any failure is logged and converted into a Fail.
  """
  config = Script.get_config()
  structured_output = {}
  try:
    repo_info_json = config['hostLevelParams']['repo_info']
    repo_info_dict = json.loads(repo_info_json)
    for item in repo_info_dict["repositories"]:
      base_url = item["base_url"]
      repo_name = item["repo_name"]
      repo_id = item["repo_id"]
      repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
      repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
      template = repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else repo_ubuntu
      ubuntu_components = [repo_name] + self.UBUNTU_REPO_COMPONENTS_POSTFIX
      Repository(repo_id,
                 action="create",
                 base_url=base_url,
                 mirror_list=None,
                 repo_file_name=repo_name,
                 repo_template=template,
                 components=ubuntu_components,  # ubuntu specific
      )
      structured_output["repo_update"] = {"exit_code": 0, "message": format("Repository files successfully updated!")}
  # was "except Exception, exception" — modernized to the Py2.6+/Py3 form
  except Exception as exception:
    Logger.logger.exception("ERROR: There was an unexpected error while updating repositories")
    raise Fail("Failed to update repo files!")
# HBase params.py fragment: heap sizes, phoenix detection, JAAS paths.
# HBase daemon heap sizes, normalized to carry an explicit memory unit.
master_heapsize = ensure_unit_for_memory(config['configurations']['hbase-env']['hbase_master_heapsize'])
regionserver_heapsize = ensure_unit_for_memory(config['configurations']['hbase-env']['hbase_regionserver_heapsize'])
regionserver_xmn_max = config['configurations']['hbase-env']['hbase_regionserver_xmn_max']
regionserver_xmn_percent = expect("/configurations/hbase-env/hbase_regionserver_xmn_ratio", float)
# young-gen size derived from heap, ratio and cap
regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)
hbase_regionserver_shutdown_timeout = expect('/configurations/hbase-env/hbase_regionserver_shutdown_timeout', int, 30)
phoenix_hosts = default('/clusterHostInfo/phoenix_query_server_hosts', [])
phoenix_enabled = default('/configurations/hbase-env/phoenix_sql_enabled', False)
has_phoenix = len(phoenix_hosts) > 0
# NOTE: format() below resolves these local names inside the package patterns.
underscored_version = stack_version_unformatted.replace('.', '_')
dashed_version = stack_version_unformatted.replace('.', '-')
if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
  phoenix_package = format("phoenix_{underscored_version}_*")
elif OSCheck.is_ubuntu_family():
  phoenix_package = format("phoenix-{dashed_version}-.*")
pid_dir = status_params.pid_dir
tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
local_dir = config['configurations']['hbase-site']['hbase.local.dir']
ioengine_param = default('/configurations/hbase-site/hbase.bucketcache.ioengine', None)
# Per-role JAAS config file locations under the HBase conf dir.
client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf")
master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf")
regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf")
queryserver_jaas_config_file = format("{hbase_conf_dir}/hbase_queryserver_jaas.conf")
ganglia_server_hosts = default('/clusterHostInfo/ganglia_server_host', []) # is not passed when ganglia is not present
secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port) elif dfs_http_policy == "HTTP_AND_HTTPS": secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port) else: # params.dfs_http_policy == "HTTP_ONLY" or not defined: secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) if secure_dn_ports_are_in_use: hadoop_secure_dn_user = hdfs_user else: hadoop_secure_dn_user = '******' #hadoop params hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix'] hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix'] hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger'] if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and not OSCheck.is_suse_family(): # deprecated rhel jsvc_path jsvc_path = "/usr/libexec/bigtop-utils" else: jsvc_path = "/usr/lib/bigtop-utils" hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize'] namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize'] namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize'] namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize'] namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m") namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m") jtnode_opt_newsize = "200m" jtnode_opt_maxnewsize = "200m" jtnode_heapsize = "1024m"
dfs_dn_http_port) or utils.is_secure_port( dfs_dn_https_port) else: # params.dfs_http_policy == "HTTP_ONLY" or not defined: secure_dn_ports_are_in_use = utils.is_secure_port( dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) if secure_dn_ports_are_in_use: hadoop_secure_dn_user = hdfs_user else: hadoop_secure_dn_user = '******' ambari_libs_dir = "/var/lib/ambari-agent/lib" limits_conf_dir = "/etc/security/limits.d" if Script.is_hdp_stack_greater_or_equal( "2.0") and Script.is_hdp_stack_less_than( "2.1") and not OSCheck.is_suse_family(): # deprecated rhel jsvc_path jsvc_path = "/usr/libexec/bigtop-utils" else: jsvc_path = "/usr/lib/bigtop-utils" execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir ulimit_cmd = "ulimit -c unlimited ; " #security params smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab'] hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] falcon_user = config['configurations']['falcon-env']['falcon_user'] #exclude file hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
def actionexecute(self, env):
  """
  Install/update repositories for the repository version carried by the command.

  Parses repository_version/base_urls/package_list/stack_id from roleParams
  (falling back to commandParams), derives the stack root folder from stack_id,
  then writes repo files and records the repositories that were created.
  NOTE(review): this chunk appears truncated — the method likely continues
  beyond the visible portion (num_errors/package_list suggest later use).
  """
  num_errors = 0
  # Parse parameters
  config = Script.get_config()
  repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
  repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
  # yum/zypper families share one template; apt uses the other
  template = repo_rhel_suse if OSCheck.is_redhat_family() or OSCheck.is_suse_family() else repo_ubuntu
  # Handle a SIGTERM and SIGINT gracefully
  signal.signal(signal.SIGTERM, self.abort_handler)
  signal.signal(signal.SIGINT, self.abort_handler)
  # Select dict that contains parameters
  try:
    self.repository_version = config['roleParams']['repository_version']
    base_urls = json.loads(config['roleParams']['base_urls'])
    package_list = json.loads(config['roleParams']['package_list'])
    stack_id = config['roleParams']['stack_id']
  except KeyError:
    # Last try
    self.repository_version = config['commandParams']['repository_version']
    base_urls = json.loads(config['commandParams']['base_urls'])
    package_list = json.loads(config['commandParams']['package_list'])
    stack_id = config['commandParams']['stack_id']
  stack_name = None
  self.stack_root_folder = None
  # stack_id is expected to look like "<NAME>-<version>", e.g. "HDP-2.3"
  if stack_id and "-" in stack_id:
    stack_split = stack_id.split("-")
    if len(stack_split) == 2:
      stack_name = stack_split[0].upper()
      if stack_name in self.STACK_TO_ROOT_FOLDER:
        self.stack_root_folder = self.STACK_TO_ROOT_FOLDER[stack_name]
  if self.stack_root_folder is None:
    raise Fail(
      "Cannot determine the stack's root directory by parsing the stack_id property, {0}".format(str(stack_id)))
  if self.repository_version is None:
    raise Fail("Cannot determine the repository version to install")
  self.repository_version = self.repository_version.strip()
  # Install/update repositories
  installed_repositories = []
  self.current_repositories = []
  self.current_repo_files = set()
  # Enable base system repositories
  # We don't need that for RHEL family, because we leave all repos enabled
  # except disabled HDP* ones
  if OSCheck.is_suse_family():
    self.current_repositories.append('base')
  elif OSCheck.is_ubuntu_family():
    self.current_repo_files.add('base')
  Logger.info("Will install packages for repository version {0}".format(self.repository_version))
  try:
    # Only the first repo starts a fresh file; the rest append to it.
    append_to_file = False
    for url_info in base_urls:
      repo_name, repo_file = self.install_repository(url_info, append_to_file, template)
      self.current_repositories.append(repo_name)
      self.current_repo_files.add(repo_file)
      append_to_file = True
    installed_repositories = list_ambari_managed_repos()
  except Exception, err:
    # repo failures are non-fatal here; the error count is reported later
    Logger.logger.exception("Cannot distribute repositories. Error: {0}".format(str(err)))
    num_errors += 1
# Hadoop params.py fragment: LZO package exclusion, hadoop-env and JVM sizing.
lzo_packages = get_lzo_packages(stack_version_unformatted)
exclude_packages = []
# When LZO is disabled its packages are excluded from installation.
if not lzo_enabled:
  exclude_packages += lzo_packages
name_node_params = default("/commandParams/namenode", None)
#hadoop params
hadoop_env_sh_template = config['configurations']['hadoop-env']['content'] #hadoop-env.sh
java_home = config['hostLevelParams']['java_home']
java_version = int(config['hostLevelParams']['java_version'])
# HDP 2.0.x (but not 2.1+) on non-SUSE uses the libexec jsvc location.
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and not OSCheck.is_suse_family():
  # deprecated rhel jsvc_path
  jsvc_path = "/usr/libexec/bigtop-utils"
else:
  jsvc_path = "/usr/lib/bigtop-utils"
# JVM heap settings taken straight from hadoop-env configuration.
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
jtnode_opt_newsize = "200m"
jtnode_opt_maxnewsize = "200m"
jtnode_heapsize = "1024m"
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
from utils import get_property_value, get_unstructured_data
from ambari_commons.os_check import OSCheck

# Kerberos configuration file locations; krb5.conf lives in /etc everywhere.
krb5_conf_dir = '/etc'
krb5_conf_file = 'krb5.conf'
krb5_conf_path = krb5_conf_dir + '/' + krb5_conf_file

# kdc.conf location varies by OS family.
if OSCheck.is_suse_family():
  kdc_conf_dir = '/var/lib/kerberos/krb5kdc'
elif OSCheck.is_ubuntu_family():
  kdc_conf_dir = '/etc/krb5kdc'
else:
  kdc_conf_dir = '/var/kerberos/krb5kdc'
kdc_conf_file = 'kdc.conf'
kdc_conf_path = kdc_conf_dir + '/' + kdc_conf_file

kadm5_acl_dir = kdc_conf_dir  # Typically kadm5.acl and kdc.conf exist in the same directory
kadm5_acl_file = 'kadm5.acl'
kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
# HBase params.py fragment: region server young-gen sizing, phoenix package
# selection and JAAS file locations.
regionserver_xmn_percent = config['configurations']['hbase-env']['hbase_regionserver_xmn_ratio']
regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)
phoenix_hosts = default('/clusterHostInfo/phoenix_query_server_hosts', [])
phoenix_enabled = default('/configurations/hbase-env/phoenix_sql_enabled', False)
has_phoenix = len(phoenix_hosts) > 0
# Skip phoenix packages entirely when no query servers exist and SQL is off.
if not has_phoenix and not phoenix_enabled:
  exclude_packages = ['phoenix*']
else:
  exclude_packages = []
# NOTE: format() below resolves these local names inside the package patterns.
underscored_version = stack_version_unformatted.replace('.', '_')
dashed_version = stack_version_unformatted.replace('.', '-')
if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
  phoenix_package = format("phoenix_{underscored_version}_*")
elif OSCheck.is_ubuntu_family():
  phoenix_package = format("phoenix-{dashed_version}-.*")
pid_dir = status_params.pid_dir
tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
local_dir = config['configurations']['hbase-site']['hbase.local.dir']
# Per-role JAAS config file locations under the HBase conf dir.
client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf")
master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf")
regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf")
queryserver_jaas_config_file = format("{hbase_conf_dir}/hbase_queryserver_jaas.conf")
ganglia_server_hosts = default('/clusterHostInfo/ganglia_server_host', []) # is not passed when ganglia is not present
ganglia_server_host = '' if len(ganglia_server_hosts) == 0 else ganglia_server_hosts[0]
# Hadoop params.py fragment: security/java/hadoop-env settings and JVM sizing.
hadoop_conf_empty_dir = None
versioned_hdp_root = '/usr/hdp/current'
#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']
#java params
java_home = config['hostLevelParams']['java_home']
#hadoop params
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
# HDP 2.0.x (but not 2.1+) on non-SUSE uses the libexec jsvc location.
if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
  # deprecated rhel jsvc_path
  jsvc_path = "/usr/libexec/bigtop-utils"
else:
  jsvc_path = "/usr/lib/bigtop-utils"
# JVM heap settings taken straight from hadoop-env configuration.
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
jtnode_opt_newsize = "200m"
jtnode_opt_maxnewsize = "200m"
jtnode_heapsize = "1024m"