def get_lzo_packages():
    """Return the list of LZO package names to install for the current OS family.

    Adds the OS-level lzo library first, then — when the stack supports
    rolling upgrade — the stack-versioned hadooplzo packages.
    """
    script_instance = Script.get_instance()
    if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12:
        packages = ["liblzo2-2"]
    elif OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        packages = ["lzo"]
    elif OSCheck.is_ubuntu_family():
        packages = ["liblzo2-2"]
    else:
        packages = []

    # only used to check stack_feature, NOT as package version!
    stack_version_unformatted = stack_features.get_stack_feature_version(Script.get_config())

    if stack_version_unformatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_unformatted):
        # Ubuntu package names use '-' as the version separator, others use '_'.
        if OSCheck.is_ubuntu_family():
            versioned = ("hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native")
        else:
            versioned = ("hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native")
        packages += [script_instance.format_package_name(name) for name in versioned]

    return packages
def get_lzo_packages(stack_version_unformatted):
    """Return LZO package names for this OS family and stack version.

    :param stack_version_unformatted: raw stack version string used only for
        the ROLLING_UPGRADE feature check.
    """
    script_instance = Script.get_instance()
    if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        packages = ["lzo", "hadoop-lzo-native"]
    elif OSCheck.is_ubuntu_family():
        packages = ["liblzo2-2"]
    else:
        packages = []

    rolling_upgrade = stack_version_unformatted and check_stack_feature(
        StackFeature.ROLLING_UPGRADE, stack_version_unformatted)
    if not rolling_upgrade:
        # Legacy stacks ship a single unversioned package.
        packages.append("hadoop-lzo")
    elif OSCheck.is_ubuntu_family():
        packages.append(script_instance.format_package_name("hadooplzo-${stack_version}"))
        packages.append(script_instance.format_package_name("hadooplzo-${stack_version}-native"))
    else:
        packages.append(script_instance.format_package_name("hadooplzo_${stack_version}"))
        packages.append(script_instance.format_package_name("hadooplzo_${stack_version}-native"))

    return packages
def install_packages(self, package_list):
    """
    Actually install the packages using the package manager.
    :param package_list: List of package names to install
    :return: Returns 0 if no errors were found, and 1 otherwise.
    """
    ret_code = 0
    # Install packages
    packages_were_checked = False
    try:
        # Base prerequisites (fuse libs etc.) are installed with all repos enabled.
        Package(self.get_base_packages_to_install())
        # Snapshot installed packages so a failed install can be rolled back below.
        packages_installed_before = []
        allInstalledPackages(packages_installed_before)
        packages_installed_before = [package[0] for package in packages_installed_before]
        packages_were_checked = True
        filtered_package_list = self.filter_package_list(package_list)
        for package in filtered_package_list:
            name = self.format_package_name(package['name'], self.repository_version)
            # Ubuntu: restrict installation to the freshly added repo files.
            # RedHat: leave all repos enabled but skip Ambari-managed repo files.
            Package(name,
                    use_repos=list(self.current_repo_files) if OSCheck.is_ubuntu_family() else self.current_repositories,
                    skip_repos=[self.REPO_FILE_NAME_PREFIX + "*"] if OSCheck.is_redhat_family() else [])
    except Exception, err:
        ret_code = 1
        Logger.logger.exception("Package Manager failed to install packages. Error: {0}".format(str(err)))
        # Remove already installed packages in case of fail
        if packages_were_checked and packages_installed_before:
            packages_installed_after = []
            allInstalledPackages(packages_installed_after)
            packages_installed_after = [package[0] for package in packages_installed_after]
            packages_installed_before = set(packages_installed_before)
            new_packages_installed = [package for package in packages_installed_after if package not in packages_installed_before]
            # Version embedded in package names uses '-' on Ubuntu, '_' elsewhere.
            if OSCheck.is_ubuntu_family():
                package_version_string = self.repository_version.replace('.', '-')
            else:
                package_version_string = self.repository_version.replace('-', '_')
                package_version_string = package_version_string.replace('.', '_')
            for package in new_packages_installed:
                if package_version_string and (package_version_string in package):
                    Package(package, action="remove")
def list_ambari_managed_repos(stack_name):
    """
    Lists all Ambari-managed repositories that are present on this host.

    :param stack_name: Stack name (e.g. "HDP"); upper-cased before matching.
    :return: Unique repo file base names (extension stripped), in discovery order.
    :raises Fail: if the repository directory for this OS family is unknown.
    """
    stack_name = stack_name.upper()
    # TODO : get it dynamically from the server
    repository_names = [stack_name, stack_name + "-UTILS"]
    if OSCheck.is_ubuntu_family():
        repo_dir = '/etc/apt/sources.list.d/'
    elif OSCheck.is_redhat_family():
        # Centos/RHEL 5/6
        repo_dir = '/etc/yum.repos.d/'
    elif OSCheck.is_suse_family():
        repo_dir = '/etc/zypp/repos.d/'
    else:
        # fixed message typo: was "Can not dermine repo dir"
        raise Fail('Cannot determine repo dir')
    repos = []
    for name in repository_names:
        # List all files that match pattern
        for f in glob.glob(os.path.join(repo_dir, name) + '*'):
            # leave out extension
            repos.append(os.path.splitext(os.path.basename(f))[0])
    # keep only the first occurrence of each name, preserving order
    seen = set()
    return [s for s in repos if not (s in seen or seen.add(s))]
def server_files():
    """Create the rrd.py CGI script, the rrdcached base directory and,
    on SUSE/Ubuntu, the ganglia apache config file."""
    import params

    rrd_py_path = params.rrd_py_path
    Directory(rrd_py_path,
              recursive=True
    )
    rrd_py_file_path = path.join(rrd_py_path, "rrd.py")
    # rrd.py is rendered from a template and must be executable (CGI).
    TemplateConfig(rrd_py_file_path,
                   owner="root",
                   group="root",
                   mode=0755
    )
    rrd_file_owner = params.gmetad_user
    # rrdcached spool dir is owned by the gmetad user (owner == group).
    Directory(params.rrdcached_base_dir,
              owner=rrd_file_owner,
              group=rrd_file_owner,
              mode=0755,
              recursive=True
    )
    # Apache-fronted ganglia needs its own conf only on SUSE/Ubuntu.
    if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
        File(params.ganglia_apache_config_file,
             content = Template("ganglia.conf.j2"),
             mode = 0644
        )
def list_ambari_managed_repos(stack_name):
    """
    Lists all Ambari-managed repositories that are present on this host.

    :param stack_name: Stack name (e.g. "HDP"); upper-cased before matching.
    :return: Unique repo file base names without extension, in discovery order.
    :raises Fail: when the OS family's repo directory cannot be determined.
    """
    stack_name = stack_name.upper()
    # TODO : get it dynamically from the server
    repository_names = [stack_name, stack_name + "-UTILS"]
    if OSCheck.is_ubuntu_family():
        repo_dir = '/etc/apt/sources.list.d/'
    elif OSCheck.is_redhat_family():
        # Centos/RHEL 5/6
        repo_dir = '/etc/yum.repos.d/'
    elif OSCheck.is_suse_family():
        repo_dir = '/etc/zypp/repos.d/'
    else:
        # fixed message typo: was "Can not dermine repo dir"
        raise Fail('Cannot determine repo dir')
    repos = []
    for name in repository_names:
        # List all files that match pattern
        files = glob.glob(os.path.join(repo_dir, name) + '*')
        for f in files:
            filename = os.path.basename(f)
            # leave out extension
            repos.append(os.path.splitext(filename)[0])
    # get uniq strings, preserving first-seen order
    seen = set()
    uniq = [s for s in repos if not (s in seen or seen.add(s))]
    return uniq
def execute_transparent_huge_page_check(self, config):
    """
    Report the active transparent-huge-page setting for this host.

    Reads the THP sysfs file for the OS family and extracts the bracketed
    active value (e.g. "[always] madvise never" -> "always").
    :param config: command configuration (unused here).
    :return: structured output dict with "exit_code" (always 0) and "message"
             (active THP value, or "" when unavailable).
    """
    Logger.info("Transparent huge page check started.")

    thp_regex = r"\[(.+)\]"  # raw string: match the bracketed active option
    file_name = None
    if OSCheck.is_ubuntu_family():
        file_name = THP_FILE_UBUNTU
    elif OSCheck.is_redhat_family():
        file_name = THP_FILE_REDHAT

    message = ""
    if file_name and os.path.isfile(file_name):
        with open(file_name) as f:
            file_content = f.read()
        match = re.search(thp_regex, file_content)
        # Guard: previously a file without the bracketed marker raised
        # AttributeError on .groups(); now it reports an empty message.
        if match:
            message = str(match.groups()[0])

    transparent_huge_page_check_structured_output = {
        "exit_code": 0,
        "message": message
    }
    Logger.info("Transparent huge page check completed.")
    return transparent_huge_page_check_structured_output
def install_packages(self, package_list):
    """
    Actually install the packages using the package manager.
    :param package_list: List of package names to install
    :return: Returns 0 if no errors were found, and 1 otherwise.
    """
    ret_code = 0
    # Install packages
    packages_were_checked = False
    try:
        # Make sure the stack selector tool is current before anything else.
        Package(
            "hdp-select",
            action="upgrade",
        )
        # Snapshot installed packages so a failed install can be rolled back.
        packages_installed_before = []
        allInstalledPackages(packages_installed_before)
        packages_installed_before = [package[0] for package in packages_installed_before]
        packages_were_checked = True
        filtered_package_list = self.filter_package_list(package_list)
        for package in filtered_package_list:
            name = self.format_package_name(package['name'])
            Package(
                name,
                action="upgrade"  # this enables upgrading non-versioned packages, despite the fact they exist. Needed by 'mahout' which is non-version but have to be updated
            )
    except Exception, err:
        ret_code = 1
        Logger.logger.exception("Package Manager failed to install packages. Error: {0}".format(str(err)))
        # Remove already installed packages in case of fail
        if packages_were_checked and packages_installed_before:
            packages_installed_after = []
            allInstalledPackages(packages_installed_after)
            packages_installed_after = [package[0] for package in packages_installed_after]
            packages_installed_before = set(packages_installed_before)
            new_packages_installed = [package for package in packages_installed_after if package not in packages_installed_before]
            # Version embedded in package names uses '-' on Ubuntu, '_' elsewhere.
            if OSCheck.is_ubuntu_family():
                package_version_string = self.repository_version.replace('.', '-')
            else:
                package_version_string = self.repository_version.replace('-', '_')
                package_version_string = package_version_string.replace('.', '_')
            for package in new_packages_installed:
                if package_version_string and (package_version_string in package):
                    Package(package, action="remove")
def get_serivice_params(self):
    """Populate the Apache httpd service name and conf dir for this OS family.

    NOTE(review): the method name misspells "service"; renaming would break
    existing callers, so it is kept as-is.
    """
    self.system = System.get_instance()
    uses_apache2 = OSCheck.is_suse_family() or OSCheck.is_ubuntu_family()
    self.service_name = "apache2" if uses_apache2 else "httpd"
    self.httpd_conf_dir = '/etc/apache2' if uses_apache2 else '/etc/httpd/conf'
def install_packages(self, package_list):
    """
    Actually install the packages using the package manager.
    :param package_list: List of package names to install
    :return: Returns 0 if no errors were found, and 1 otherwise.
    """
    ret_code = 0

    # Clear cache of package manager right before installation of the packages
    self._clear_package_manager_cache()

    # Install packages
    packages_were_checked = False
    try:
        # Base prerequisites are installed with all repos enabled.
        Package(self.get_base_packages_to_install())
        # Snapshot installed packages so a failed install can be rolled back.
        packages_installed_before = []
        allInstalledPackages(packages_installed_before)
        packages_installed_before = [package[0] for package in packages_installed_before]
        packages_were_checked = True
        filtered_package_list = self.filter_package_list(package_list)
        for package in filtered_package_list:
            name = self.format_package_name(package['name'], self.repository_version)
            # Ubuntu: restrict to the freshly added repo files.
            # RedHat: leave all repos enabled but skip Ambari-managed repo files.
            Package(name,
                    use_repos=list(self.current_repo_files) if OSCheck.is_ubuntu_family() else self.current_repositories,
                    skip_repos=[self.REPO_FILE_NAME_PREFIX + "*"] if OSCheck.is_redhat_family() else [])
    except Exception, err:
        ret_code = 1
        Logger.logger.exception("Package Manager failed to install packages. Error: {0}".format(str(err)))
        # Remove already installed packages in case of fail
        if packages_were_checked and packages_installed_before:
            packages_installed_after = []
            allInstalledPackages(packages_installed_after)
            packages_installed_after = [package[0] for package in packages_installed_after]
            packages_installed_before = set(packages_installed_before)
            new_packages_installed = [package for package in packages_installed_after if package not in packages_installed_before]
            # Version embedded in package names uses '-' on Ubuntu, '_' elsewhere.
            if OSCheck.is_ubuntu_family():
                package_version_string = self.repository_version.replace('.', '-')
            else:
                package_version_string = self.repository_version.replace('-', '_')
                package_version_string = package_version_string.replace('.', '_')
            for package in new_packages_installed:
                if package_version_string and (package_version_string in package):
                    Package(package, action="remove")
def stop(self, env):
    """Stop the KDC and kadmin daemons using OS-family-specific commands."""
    if OSCheck.is_suse_family():
        commands = ('rckadmind stop', 'rckrb5kdc stop')
    elif OSCheck.is_ubuntu_family():
        commands = ('service krb5-kdc stop', 'service krb5-admin-server stop')
    else:
        commands = ('service krb5kdc stop', 'service kadmin stop')
    for command in commands:
        Execute(command)
def getRepoDir(self):
    """Return the repository configuration directory for the host's OS family.

    :raises Exception: for unsupported OS families.
    """
    for family_check, directory in (
            (OSCheck.is_redhat_family, "/etc/yum.repos.d"),
            (OSCheck.is_suse_family, "/etc/zypp/repos.d"),
            (OSCheck.is_ubuntu_family, "/etc/apt/sources.list.d")):
        if family_check():
            return directory
    raise Exception("Unsupported OS family '{0}'".format(OSCheck.get_os_family()))
def get_lzo_packages(stack_version_unformatted):
    """Return LZO package names for this OS family and HDP stack version."""
    # Base package plus OS-family-specific lzo libraries.
    lzo_packages = ["hadoop-lzo"]
    if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        lzo_packages += ["lzo", "hadoop-lzo-native"]
    elif OSCheck.is_ubuntu_family():
        lzo_packages += ["liblzo2-2"]

    # NOTE(review): these locals look unused but presumably are not --
    # format() appears to resolve {underscored_version}/{dashed_version}
    # from the caller's local scope. Verify before removing them.
    underscored_version = stack_version_unformatted.replace('.', '_')
    dashed_version = stack_version_unformatted.replace('.', '-')
    hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

    # Versioned hadooplzo packages only apply from stack 2.2 onwards.
    if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
        if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
            lzo_packages += [format("hadooplzo_{underscored_version}_*")]
        elif OSCheck.is_ubuntu_family():
            lzo_packages += [format("hadooplzo_{dashed_version}_*")]

    return lzo_packages
def install_packages(self, package_list):
    """
    Actually install the packages using the package manager.
    :param package_list: List of package names to install
    :return: Returns 0 if no errors were found, and 1 otherwise.
    """
    ret_code = 0
    config = self.get_config()
    # Retry knobs pushed down from the server for transiently unavailable repos.
    agent_stack_retry_on_unavailability = cbool(config['hostLevelParams']['agent_stack_retry_on_unavailability'])
    agent_stack_retry_count = cint(config['hostLevelParams']['agent_stack_retry_count'])

    # Install packages
    packages_were_checked = False
    stack_selector_package = stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME)
    try:
        # Make sure the stack selector tool is current before anything else.
        Package(stack_selector_package,
                action="upgrade",
                retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
                retry_count=agent_stack_retry_count
        )
        # Snapshot installed packages so a failed install can be rolled back.
        packages_installed_before = []
        allInstalledPackages(packages_installed_before)
        packages_installed_before = [package[0] for package in packages_installed_before]
        packages_were_checked = True
        filtered_package_list = self.filter_package_list(package_list)
        for package in filtered_package_list:
            name = self.format_package_name(package['name'])
            Package(name,
                    action="upgrade",  # this enables upgrading non-versioned packages, despite the fact they exist. Needed by 'mahout' which is non-version but have to be updated
                    retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
                    retry_count=agent_stack_retry_count
            )
    except Exception, err:
        ret_code = 1
        Logger.logger.exception("Package Manager failed to install packages. Error: {0}".format(str(err)))
        # Remove already installed packages in case of fail
        if packages_were_checked and packages_installed_before:
            packages_installed_after = []
            allInstalledPackages(packages_installed_after)
            packages_installed_after = [package[0] for package in packages_installed_after]
            packages_installed_before = set(packages_installed_before)
            new_packages_installed = [package for package in packages_installed_after if package not in packages_installed_before]
            # Version embedded in package names uses '-' on Ubuntu, '_' elsewhere.
            if OSCheck.is_ubuntu_family():
                package_version_string = self.repository_version.replace('.', '-')
            else:
                package_version_string = self.repository_version.replace('-', '_')
                package_version_string = package_version_string.replace('.', '_')
            for package in new_packages_installed:
                if package_version_string and (package_version_string in package):
                    Package(package, action="remove")
def get_lzo_packages(stack_version_unformatted):
    """Return LZO package names for this OS family and stack version."""
    script_instance = Script.get_instance()
    if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12:
        packages = ["liblzo2-2", "hadoop-lzo-native"]
    elif OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        packages = ["lzo", "hadoop-lzo-native"]
    elif OSCheck.is_ubuntu_family():
        packages = ["liblzo2-2"]
    else:
        packages = []

    rolling_upgrade = stack_version_unformatted and check_stack_feature(
        StackFeature.ROLLING_UPGRADE, stack_version_unformatted)
    if rolling_upgrade:
        # Ubuntu package names use '-' as the version separator, others use '_'.
        if OSCheck.is_ubuntu_family():
            versioned = ("hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native")
        else:
            versioned = ("hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native")
        packages += [script_instance.format_package_name(name) for name in versioned]
    else:
        packages.append("hadoop-lzo")

    return packages
def get_base_packages_to_install(self):
    """
    HACK: list packages which should be installed without disabling any repos.
    (This is planned to fix in Ambari-2.2)
    """
    fuse_library = ('libfuse2'
                    if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family()
                    else 'fuse-libs')
    return ['fuse', fuse_library]
def get_clearcache_cmd(self):
    """Return the argv tuple that refreshes the package manager cache.

    :raises Exception: for unsupported OS families.
    """
    if OSCheck.is_redhat_family():
        Logger.info("Clear repository cache for the RedHat OS family")
        return ("/usr/bin/yum", "clean", "all")
    if OSCheck.is_suse_family():
        Logger.info("Clear repository cache for the SUSE OS family")
        return ('/usr/bin/zypper', 'refresh')
    if OSCheck.is_ubuntu_family():
        Logger.info("Clear repository cache for the Ubuntu OS family")
        return ('/usr/bin/apt-get', 'update')
    raise Exception("Unsupported OS family: '{0}' ".format(OSCheck.get_os_family()))
def _clear_package_manager_cache(self):
    """Clear the package manager metadata cache for this OS family.

    Ubuntu is a no-op here (apt cache is refreshed explicitly elsewhere).
    Unknown OS families are now also a no-op: previously the command string
    stayed empty and Execute("") was run, which could only fail.
    """
    if OSCheck.is_ubuntu_family():
        return
    if OSCheck.is_redhat_family():
        package_manager_cmd = ("/usr/bin/yum", "clean", "metadata")
    elif OSCheck.is_suse_family():
        package_manager_cmd = ("/usr/bin/zypper", "-q", "-n", "clean")
    else:
        # Unsupported family: nothing sensible to execute.
        return
    Logger.debug("Clearing repo manager metadata")
    Execute(package_manager_cmd,
            logoutput=False,
            sudo=True)
def get_lzo_packages(stack_version_unformatted):
    """Return LZO package names for this OS family and stack version."""
    if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        result = ["lzo", "hadoop-lzo-native"]
    elif OSCheck.is_ubuntu_family():
        result = ["liblzo2-2"]
    else:
        result = []
    supports_rolling = bool(stack_version_unformatted) and check_stack_feature(
        StackFeature.ROLLING_UPGRADE, stack_version_unformatted)
    # Wildcard hadooplzo package on rolling-upgrade stacks, legacy name otherwise.
    result.append("hadooplzo_*" if supports_rolling else "hadoop-lzo")
    return result
def get_elastic_config_path(default="/etc/default/elasticsearch"):
    """
    Defines the path to the Elasticsearch environment file. This path will
    differ based on the OS family.
    :param default: The path used if the OS family is not recognized.
    """
    if OSCheck.is_redhat_family():
        return "/etc/sysconfig/elasticsearch"
    if OSCheck.is_ubuntu_family():
        return "/etc/default/elasticsearch"
    Logger.error("Unexpected OS family; using default path={0}".format(default))
    return default
def get_lzo_packages():
    """Return LZO package names for the current OS family, always including
    the stack-versioned hadooplzo packages."""
    script_instance = Script.get_instance()
    if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12:
        packages = ["liblzo2-2"]
    elif OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        packages = ["lzo"]
    elif OSCheck.is_ubuntu_family():
        packages = ["liblzo2-2"]
    else:
        packages = []
    # Ubuntu package names use '-' as the version separator, others use '_'.
    if OSCheck.is_ubuntu_family():
        versioned = ("hadooplzo-${stack_version}", "hadooplzo-${stack_version}-native")
    else:
        versioned = ("hadooplzo_${stack_version}", "hadooplzo_${stack_version}-native")
    packages += [script_instance.format_package_name(name) for name in versioned]
    return packages
def start(self, env):
    """Reconfigure, ensure the admin identity exists, then start the KDC
    and kadmin daemons with OS-family-specific commands."""
    # Attempt to reconfigure the service before starting
    self.configure(env)

    # Create or update the administrator account
    KerberosScript.create_or_update_administrator_identity()

    if OSCheck.is_suse_family():
        start_commands = ('rckadmind start', 'rckrb5kdc start')
    elif OSCheck.is_ubuntu_family():
        start_commands = ('service krb5-kdc start', 'service krb5-admin-server start')
    else:
        start_commands = ('service krb5kdc start', 'service kadmin start')
    for command in start_commands:
        Execute(command)
def __init__(self, host, shared_state):
    """Worker thread bootstrapping a single host; logs to <bootdir>/<host>.log."""
    threading.Thread.__init__(self)
    self.host = host
    self.shared_state = shared_state
    self.status = {"start_time": None, "return_code": None}
    self.host_log = HostLog(os.path.join(self.shared_state.bootdir, self.host + ".log"))
    self.daemon = True
    # Repo file extension depends on the host's package manager.
    if OSCheck.is_ubuntu_family():
        self.AMBARI_REPO_FILENAME = self.AMBARI_REPO_FILENAME + ".list"
    else:
        self.AMBARI_REPO_FILENAME = self.AMBARI_REPO_FILENAME + ".repo"
def status(self, env):
    """Check that the KDC and kadmin daemons are running.

    :raises ComponentIsNotRunning: on SUSE when checkproc fails.
    (On other families check_process_status raises its own error.)
    """
    import params
    if OSCheck.is_suse_family():
        # SUSE: probe the daemons via checkproc instead of pid files.
        try:
            Execute('checkproc `which krb5kdc`')
            Execute('checkproc `which kadmind`')
        except Fail as ex:
            raise ComponentIsNotRunning()
    else:
        # The Ubuntu branch was byte-identical to the default branch,
        # so the two are merged here (no behavior change).
        check_process_status(params.kdamin_pid_path)
        check_process_status(params.krb5kdc_pid_path)
def turn_off_autostart(service):
    """Disable OS autostart for `service` and stop it if currently running."""
    search_path = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
    if OSCheck.is_ubuntu_family():
        Execute(('update-rc.d', service, 'disable'),
                path=search_path,
                sudo=True)
        Execute(('service', service, 'stop'),
                sudo=True,
                ignore_failures=True)
        # disable upstart job
        File(format('/etc/init/{service}.override'),
             content='manual')
    else:
        Execute(('chkconfig', service, 'off'),
                path=search_path,
                sudo=True)
def get_lzo_packages(stack_version_unformatted):
    """Return LZO package names for this OS family and stack version.

    :param stack_version_unformatted: raw stack version, e.g. "2.3.0.0".
    """
    lzo_packages = []
    if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
        lzo_packages += ["lzo", "hadoop-lzo-native"]
    elif OSCheck.is_ubuntu_family():
        lzo_packages += ["liblzo2-2"]

    # Removed dead locals underscored_version / dashed_version: unlike the
    # older variant of this function they were never referenced here.
    stack_version_formatted = format_stack_version(stack_version_unformatted)
    # Versioned hadooplzo packages only exist from stack 2.2 onwards.
    if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0:
        lzo_packages += ["hadooplzo_*"]
    else:
        lzo_packages += ["hadoop-lzo"]

    return lzo_packages
def copyNeededFiles(self):
    """Copy the Ambari repo file and the agent setup script to the remote
    host, moving the repo file into the OS repo directory, and return the
    worst (max) exit status of the transfers."""
    # Copying the files
    fileToCopy = self.getRepoFile()
    target = self.getRemoteName(self.AMBARI_REPO_FILENAME)
    self.host_log.write("==========================\n")
    self.host_log.write("Copying repo file to 'tmp' folder...")
    params = self.shared_state
    scp = SCP(params.user, params.sshkey_file, self.host, fileToCopy, target, params.bootdir, self.host_log)
    retcode1 = scp.run()
    self.host_log.write("\n")

    # Move file to repo dir
    self.host_log.write("==========================\n")
    self.host_log.write("Moving file to repo dir...")
    targetDir = self.getRepoDir()
    command = self.getMoveRepoFileCommand(targetDir)
    ssh = SSH(params.user, params.sshkey_file, self.host, command, params.bootdir, self.host_log)
    retcode2 = ssh.run()
    self.host_log.write("\n")

    # Update repo cache for ubuntu OS
    if OSCheck.is_ubuntu_family():
        self.host_log.write("==========================\n")
        self.host_log.write("Update apt cache of repository...")
        command = self.getAptUpdateCommand()
        ssh = SSH(params.user, params.sshkey_file, self.host, command, params.bootdir, self.host_log)
        # NOTE(review): retcode2 is overwritten here, so on Ubuntu the
        # move-step's exit status is discarded — confirm this is intended.
        retcode2 = ssh.run()
        self.host_log.write("\n")

    self.host_log.write("==========================\n")
    self.host_log.write("Copying setup script file...")
    fileToCopy = params.setup_agent_file
    target = self.getRemoteName(self.SETUP_SCRIPT_FILENAME)
    scp = SCP(params.user, params.sshkey_file, self.host, fileToCopy, target, params.bootdir, self.host_log)
    retcode3 = scp.run()
    self.host_log.write("\n")

    # Any non-zero transfer status makes the whole step fail.
    return max(retcode1["exitstatus"], retcode2["exitstatus"], retcode3["exitstatus"])
def server_files():
    """Lay down the rrd.py CGI script, the rrdcached base directory and,
    on SUSE/Ubuntu, the ganglia apache config file."""
    import params

    script_dir = params.rrd_py_path
    Directory(script_dir, create_parents=True)
    # rrd.py is rendered from a template and must be executable (CGI).
    TemplateConfig(path.join(script_dir, "rrd.py"),
                   owner="root",
                   group="root",
                   mode=0o755)

    spool_owner = params.gmetad_user
    Directory(params.rrdcached_base_dir,
              owner=spool_owner,
              group=spool_owner,
              mode=0o755,
              create_parents=True)

    if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
        File(params.ganglia_apache_config_file,
             content=Template("ganglia.conf.j2"),
             mode=0o644)
def turn_off_autostart(service):
    """Remove `service` from OS autostart and make sure it is not running."""
    bin_search_path = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
    if not OSCheck.is_ubuntu_family():
        Execute(('chkconfig', service, 'off'),
                path=bin_search_path,
                sudo=True)
        return
    Execute(('update-rc.d', service, 'disable'),
            path=bin_search_path,
            sudo=True)
    Execute(('service', service, 'stop'),
            sudo=True,
            ignore_failures=True)
    # disable upstart job
    File(format('/etc/init/{service}.override'),
         content='manual')
def checkSudoPackage(self):
    """Verify that the 'sudo' package is installed on the remote host."""
    self.host_log.write("==========================\n")
    self.host_log.write("Checking 'sudo' package on remote host...")
    params = self.shared_state
    # dpkg on Ubuntu, rpm everywhere else.
    probe_command = ("dpkg --get-selections|grep -e '^sudo\s*install'"
                     if OSCheck.is_ubuntu_family()
                     else "rpm -qa | grep -e '^sudo\-'")
    ssh = SSH(params.user, params.sshkey_file, self.host, probe_command,
              params.bootdir, self.host_log,
              errorMessage="Error: Sudo command is not available. "
                           "Please install the sudo command.")
    retcode = ssh.run()
    self.host_log.write("\n")
    return retcode
def install_packages(self, package_list):
    """
    Actually install the packages using the package manager.
    :param package_list: List of package names to install
    :return: Returns 0 if no errors were found, and 1 otherwise.
    """
    ret_code = 0
    config = self.get_config()
    # Retry knobs pushed down from the server for transiently unavailable repos.
    agent_stack_retry_on_unavailability = cbool(config['hostLevelParams']['agent_stack_retry_on_unavailability'])
    agent_stack_retry_count = cint(config['hostLevelParams']['agent_stack_retry_count'])

    # Install packages
    packages_were_checked = False
    stack_selector_package = stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME)
    try:
        # Make sure the stack selector tool is current before anything else.
        Package(stack_selector_package,
                action="upgrade",
                retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
                retry_count=agent_stack_retry_count)
        # Snapshot installed packages so a failed install can be rolled back.
        packages_installed_before = []
        allInstalledPackages(packages_installed_before)
        packages_installed_before = [package[0] for package in packages_installed_before]
        packages_were_checked = True
        filtered_package_list = self.filter_package_list(package_list)
        try:
            # Best effort: resolve requested names against what the repos offer.
            available_packages_in_repos = packages_analyzer.get_available_packages_in_repos(config['repositoryFile']['repositories'])
        except Exception:
            available_packages_in_repos = []
        for package in filtered_package_list:
            name = self.get_package_from_available(package['name'], available_packages_in_repos)
            Package(name,
                    action="upgrade",  # this enables upgrading non-versioned packages, despite the fact they exist. Needed by 'mahout' which is non-version but have to be updated
                    retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
                    retry_count=agent_stack_retry_count)
    except Exception as err:
        ret_code = 1
        Logger.logger.exception("Package Manager failed to install packages. Error: {0}".format(str(err)))
        # Remove already installed packages in case of fail
        if packages_were_checked and packages_installed_before:
            packages_installed_after = []
            allInstalledPackages(packages_installed_after)
            packages_installed_after = [package[0] for package in packages_installed_after]
            packages_installed_before = set(packages_installed_before)
            new_packages_installed = [package for package in packages_installed_after if package not in packages_installed_before]
            # Version embedded in package names uses '-' on Ubuntu, '_' elsewhere.
            if OSCheck.is_ubuntu_family():
                package_version_string = self.repository_version.replace('.', '-')
            else:
                package_version_string = self.repository_version.replace('-', '_')
                package_version_string = package_version_string.replace('.', '_')
            for package in new_packages_installed:
                if package_version_string and (package_version_string in package):
                    Package(package, action="remove")

    if not verifyDependencies():
        ret_code = 1
        Logger.logger.error("Failure while verifying dependencies")
        Logger.logger.error("*******************************************************************************")
        Logger.logger.error("Manually verify and fix package dependencies and then re-run install_packages")
        Logger.logger.error("*******************************************************************************")

    # Compute the actual version in order to save it in structured out
    try:
        if ret_code == 0:
            self.compute_actual_version()
        else:
            self.check_partial_install()
    except Fail as err:
        ret_code = 1
        Logger.logger.exception("Failure while computing actual version. Error: {0}".format(str(err)))
    return ret_code
def actionexecute(self, env):
    """Distribute the repositories for a repository version: parse command
    parameters, install/update repo files, and count failures."""
    num_errors = 0

    # Parse parameters
    config = Script.get_config()

    repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
    repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
    template = repo_rhel_suse if OSCheck.is_redhat_family() or OSCheck.is_suse_family() else repo_ubuntu

    # Handle a SIGTERM and SIGINT gracefully
    signal.signal(signal.SIGTERM, self.abort_handler)
    signal.signal(signal.SIGINT, self.abort_handler)

    # Select dict that contains parameters
    try:
        self.repository_version = config['roleParams']['repository_version']
        base_urls = json.loads(config['roleParams']['base_urls'])
        package_list = json.loads(config['roleParams']['package_list'])
        stack_id = config['roleParams']['stack_id']
    except KeyError:
        # Last try
        self.repository_version = config['commandParams']['repository_version']
        base_urls = json.loads(config['commandParams']['base_urls'])
        package_list = json.loads(config['commandParams']['package_list'])
        stack_id = config['commandParams']['stack_id']

    # Derive the stack root folder from a "NAME-version" stack id.
    stack_name = None
    self.stack_root_folder = None
    if stack_id and "-" in stack_id:
        stack_split = stack_id.split("-")
        if len(stack_split) == 2:
            stack_name = stack_split[0].upper()
            if stack_name in self.STACK_TO_ROOT_FOLDER:
                self.stack_root_folder = self.STACK_TO_ROOT_FOLDER[stack_name]
    if self.stack_root_folder is None:
        raise Fail("Cannot determine the stack's root directory by parsing the stack_id property, {0}".format(str(stack_id)))
    if self.repository_version is None:
        raise Fail("Cannot determine the repository version to install")

    self.repository_version = self.repository_version.strip()

    # Install/update repositories
    installed_repositories = []
    self.current_repositories = []
    self.current_repo_files = set()

    # Enable base system repositories
    # We don't need that for RHEL family, because we leave all repos enabled
    # except disabled HDP* ones
    if OSCheck.is_suse_family():
        self.current_repositories.append('base')
    elif OSCheck.is_ubuntu_family():
        self.current_repo_files.add('base')

    Logger.info("Will install packages for repository version {0}".format(self.repository_version))
    try:
        append_to_file = False
        for url_info in base_urls:
            repo_name, repo_file = self.install_repository(url_info, append_to_file, template)
            self.current_repositories.append(repo_name)
            self.current_repo_files.add(repo_file)
            append_to_file = True

        installed_repositories = list_ambari_managed_repos()
    except Exception, err:
        Logger.logger.exception("Cannot distribute repositories. Error: {0}".format(str(err)))
        num_errors += 1
hive_metastore_win_service_name = "metastore" hive_client_win_service_name = "hwi" hive_server_win_service_name = "hiveserver2" webhcat_server_win_service_name = "templeton" else: hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir'] hive_pid = 'hive-server.pid' hive_metastore_pid = 'hive.pid' hcat_pid_dir = config['configurations']['hive-env'][ 'hcat_pid_dir'] #hcat_pid_dir webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid') process_name = 'mysqld' if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family(): daemon_name = 'mysql' else: daemon_name = 'mysqld' # Security related/required params hostname = config['hostname'] security_enabled = config['configurations']['cluster-env'][ 'security_enabled'] hadoop_conf_dir = "/etc/hadoop/conf" kinit_path_local = functions.get_kinit_path( default('/configurations/kerberos-env/executable_search_paths', None)) tmp_dir = Script.get_tmp_dir() hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] hive_user = config['configurations']['hive-env']['hive_user'] hive_conf_dir = "/etc/hive/conf"
def actionexecute(self, env):
  """
  Entry point of the custom action: set up package repositories on this host
  for the requested repository version.

  Same flow as the other actionexecute variant in this file, with one
  addition: it also records the currently installed stack version
  (self.current_hdp_stack_version) from hostLevelParams when present.

  :param env: execution environment passed in by the agent (unused directly)
  :raises Fail: when the stack root folder or the repository version cannot
                be determined from the command parameters
  """
  num_errors = 0

  # Parse parameters
  config = Script.get_config()

  # Repo file templates differ between yum/zypper systems and apt systems.
  repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
  repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
  template = repo_rhel_suse if OSCheck.is_redhat_family() or OSCheck.is_suse_family() else repo_ubuntu

  # Handle a SIGTERM and SIGINT gracefully
  signal.signal(signal.SIGTERM, self.abort_handler)
  signal.signal(signal.SIGINT, self.abort_handler)

  # Select dict that contains parameters
  try:
    self.repository_version = config['roleParams']['repository_version']
    base_urls = json.loads(config['roleParams']['base_urls'])
    package_list = json.loads(config['roleParams']['package_list'])
    stack_id = config['roleParams']['stack_id']
  except KeyError:
    # Last try
    self.repository_version = config['commandParams']['repository_version']
    base_urls = json.loads(config['commandParams']['base_urls'])
    package_list = json.loads(config['commandParams']['package_list'])
    stack_id = config['commandParams']['stack_id']

  # current stack information
  self.current_hdp_stack_version = None
  if 'stack_version' in config['hostLevelParams']:
    current_stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
    self.current_hdp_stack_version = format_hdp_stack_version(current_stack_version_unformatted)

  # stack_id is expected to look like "<NAME>-<version>" (e.g. "HDP-2.3");
  # the upper-cased name part selects the stack's root directory.
  stack_name = None
  self.stack_root_folder = None
  if stack_id and "-" in stack_id:
    stack_split = stack_id.split("-")
    if len(stack_split) == 2:
      stack_name = stack_split[0].upper()
      if stack_name in self.STACK_TO_ROOT_FOLDER:
        self.stack_root_folder = self.STACK_TO_ROOT_FOLDER[stack_name]
  if self.stack_root_folder is None:
    raise Fail("Cannot determine the stack's root directory by parsing the stack_id property, {0}".format(str(stack_id)))
  if self.repository_version is None:
    raise Fail("Cannot determine the repository version to install")
  self.repository_version = self.repository_version.strip()

  # Install/update repositories
  installed_repositories = []
  self.current_repositories = []
  self.current_repo_files = set()

  # Enable base system repositories
  # We don't need that for RHEL family, because we leave all repos enabled
  # except disabled HDP* ones
  if OSCheck.is_suse_family():
    self.current_repositories.append('base')
  elif OSCheck.is_ubuntu_family():
    self.current_repo_files.add('base')

  Logger.info("Will install packages for repository version {0}".format(self.repository_version))
  try:
    append_to_file = False
    for url_info in base_urls:
      repo_name, repo_file = self.install_repository(url_info, append_to_file, template)
      self.current_repositories.append(repo_name)
      self.current_repo_files.add(repo_file)
      # After the first repo is written, subsequent ones append to the file.
      append_to_file = True
    installed_repositories = list_ambari_managed_repos()
  except Exception, err:  # Python 2 except syntax; this module targets Python 2
    # Best-effort: repo distribution failures are counted, not fatal here.
    Logger.logger.exception("Cannot distribute repositories. Error: {0}".format(str(err)))
    num_errors += 1
  # NOTE(review): the method appears truncated at this point in this chunk;
  # the remainder (presumably the actual package installation) is not visible
  # here — confirm against the full file.
def install_packages(self, package_list):
  """
  Actually install the packages using the package manager.

  Flow: upgrade the stack-selector tool first (scoped to the command's
  repositories), snapshot the installed-package list, then upgrade every
  package from the filtered list.  On any failure, packages that appeared
  since the snapshot and carry this repository version in their name are
  rolled back (removed).  Finally, package-manager dependencies are
  verified and the actual installed version is computed.

  :param package_list: List of package names to install
  :return: Returns 0 if no errors were found, and 1 otherwise.
  """
  ret_code = 0

  config = self.get_config()
  # Retry behaviour for transient repo unavailability, driven by agent config.
  agent_stack_retry_on_unavailability = cbool(config['ambariLevelParams']['agent_stack_retry_on_unavailability'])
  agent_stack_retry_count = cint(config['ambariLevelParams']['agent_stack_retry_count'])

  # Install packages
  packages_were_checked = False
  packages_installed_before = []
  stack_selector_package = stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME)

  try:
    # install the stack-selector; we need to supply the action as "upgrade" here since the normal
    # install command will skip if the package is already installed in the system.
    # This is required for non-versioned components, like stack-select, since each version of
    # the stack comes with one. Also, scope the install by repository since we need to pick a
    # specific repo that the stack-select tools are coming out of in case there are multiple
    # patches installed
    repositories = config['repositoryFile']['repositories']
    command_repos = CommandRepository(config['repositoryFile'])
    repository_ids = [repository['repoId'] for repository in repositories]
    # Only use repo files the agent actually knows about for this command.
    repos_to_use = {}
    for repo_id in repository_ids:
      if repo_id in self.repo_files:
        repos_to_use[repo_id] = self.repo_files[repo_id]

    self.repo_mgr.upgrade_package(stack_selector_package, RepoCallContext(
      ignore_errors=False,
      use_repos=repos_to_use,
      retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
      retry_count=agent_stack_retry_count))

    # Snapshot installed packages so a failure can be rolled back precisely.
    packages_installed_before = self.repo_mgr.installed_packages()
    packages_installed_before = [package[0] for package in packages_installed_before]
    packages_were_checked = True
    filtered_package_list = self.filter_package_list(package_list)
    try:
      available_packages_in_repos = self.repo_mgr.get_available_packages_in_repos(command_repos)
    except Exception:
      # Best-effort: fall back to an empty availability list rather than fail.
      available_packages_in_repos = []

    installation_context = RepoCallContext(
      ignore_errors=False,
      retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
      retry_count=agent_stack_retry_count)

    for package in filtered_package_list:
      name = self.get_package_from_available(package['name'], available_packages_in_repos)

      # This enables upgrading non-versioned packages, despite the fact they exist.
      # Needed by 'mahout' which is non-version but have to be updated
      self.repo_mgr.upgrade_package(name, installation_context)
  except Exception as err:
    ret_code = 1
    Logger.logger.error("Package Manager failed to install packages: {0}".format(str(err)))

    # Remove already installed packages in case of fail
    if packages_were_checked and packages_installed_before:
      packages_installed_after = self.repo_mgr.installed_packages()
      packages_installed_after = [package[0] for package in packages_installed_after]
      packages_installed_before = set(packages_installed_before)
      new_packages_installed = [package for package in packages_installed_after if package not in packages_installed_before]

      # Package names embed the repo version with distro-specific separators:
      # dashes on deb-based systems, underscores on rpm-based systems.
      if OSCheck.is_ubuntu_family():
        package_version_string = self.repository_version.replace('.', '-')
      else:
        package_version_string = self.repository_version.replace('-', '_')
        package_version_string = package_version_string.replace('.', '_')

      for package in new_packages_installed:
        if package_version_string and (package_version_string in package):
          self.repo_mgr.remove_package(package, RepoCallContext())

  if not self.repo_mgr.verify_dependencies():
    ret_code = 1
    Logger.logger.error("Failure while verifying dependencies")
    Logger.logger.error("*******************************************************************************")
    Logger.logger.error("Manually verify and fix package dependencies and then re-run install_packages")
    Logger.logger.error("*******************************************************************************")

  # Compute the actual version in order to save it in structured out
  try:
    if ret_code == 0:
      self.compute_actual_version()
    else:
      self.check_partial_install()
  except Fail as err:
    ret_code = 1
    Logger.logger.exception("Failure while computing actual version. Error: {0}".format(str(err)))

  return ret_code
# HBase RegionServer heap/JVM sizing.
regionserver_heapsize = ensure_unit_for_memory(config['configurations']['hbase-env']['hbase_regionserver_heapsize'])
regionserver_xmn_max = config['configurations']['hbase-env']['hbase_regionserver_xmn_max']
regionserver_xmn_percent = expect("/configurations/hbase-env/hbase_regionserver_xmn_ratio", float)
# Young-gen size derived from the heap size and ratio, capped at xmn_max.
regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)

# Seconds to wait for a clean RegionServer shutdown; defaults to 30.
hbase_regionserver_shutdown_timeout = expect('/configurations/hbase-env/hbase_regionserver_shutdown_timeout', int, 30)

phoenix_hosts = default('/clusterHostInfo/phoenix_query_server_hosts', [])
phoenix_enabled = default('/configurations/hbase-env/phoenix_sql_enabled', False)
has_phoenix = len(phoenix_hosts) > 0

# Phoenix package names embed the stack version with distro-specific
# separators: underscores on rpm-based distros, dashes on deb-based distros.
underscored_version = stack_version_unformatted.replace('.', '_')
dashed_version = stack_version_unformatted.replace('.', '-')
if OSCheck.is_redhat_family() or OSCheck.is_suse_family():
  phoenix_package = format("phoenix_{underscored_version}_*")
elif OSCheck.is_ubuntu_family():
  phoenix_package = format("phoenix-{dashed_version}-.*")

pid_dir = status_params.pid_dir
tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
local_dir = config['configurations']['hbase-site']['hbase.local.dir']
ioengine_param = default('/configurations/hbase-site/hbase.bucketcache.ioengine', None)

# JAAS config files rendered per HBase component (Kerberos setups).
client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf")
master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf")
regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf")
queryserver_jaas_config_file = format("{hbase_conf_dir}/hbase_queryserver_jaas.conf")

ganglia_server_hosts = default('/clusterHostInfo/ganglia_server_host', []) # is not passed when ganglia is not present
has_ganglia_server = not len(ganglia_server_hosts) == 0
# NOTE(review): fragment is truncated here — the body of this `if` lies
# outside this chunk; confirm against the full file.
if has_ganglia_server:
# for create_hdfs_directory security_enabled = config['configurations']['cluster-env']['security_enabled'] hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] hdfs_principal_name = config['configurations']['hadoop-env'][ 'hdfs_principal_name'] smokeuser_principal = config['configurations']['cluster-env'][ 'smokeuser_principal_name'] kinit_path_local = get_kinit_path( default('/configurations/kerberos-env/executable_search_paths', None)) hdfs_site = config['configurations']['hdfs-site'] default_fs = config['configurations']['core-site']['fs.defaultFS'] dfs_type = default("/commandParams/dfs_type", "") # MYSQL if OSCheck.is_ubuntu_family(): mysql_configname = '/etc/mysql/my.cnf' else: mysql_configname = '/etc/my.cnf' daemon_name = status_params.daemon_name # There will always be exactly one mysql_host mysql_host = config['clusterHostInfo']['metron_enrichment_mysql_server_hosts'][ 0] mysql_port = config['configurations']['metron-env'][ 'metron_enrichment_db_port'] mysql_adduser_path = tmp_dir + "/addMysqlUser.sh" mysql_deluser_path = tmp_dir + "/removeMysqlUser.sh" mysql_create_geoip_path = tmp_dir + "/createMysqlGeoIp.sh"