def is_ubuntu_family(self, get_os_family_mock):
  get_os_family_mock.return_value = "ubuntu"
  self.assertEqual(OSCheck.is_ubuntu_family(), True)

  get_os_family_mock.return_value = "troll_os"
  self.assertEqual(OSCheck.is_ubuntu_family(), False)

def get_postgre_hba_dir(OS_FAMILY):
  """Return the postgres hba dir location, depending on the OS.
  Also, depending on the postgres version, creates symlinks like postgresql-->postgresql-9.3:
  1) /etc/rc.d/init.d/postgresql --> /etc/rc.d/init.d/postgresql-9.3
  2) /etc/init.d/postgresql --> /etc/init.d/postgresql-9.1
  """
  if OSCheck.is_ubuntu_family():
    # Like: /etc/postgresql/9.1/main/
    return os.path.join(get_pg_hba_init_files(), get_ubuntu_pg_version(), "main")
  elif OSCheck.is_redhat_family() and int(OSCheck.get_os_major_version()) >= 7:
    return PG_HBA_ROOT_DEFAULT
  else:
    if not os.path.isfile(get_pg_hba_init_files()):
      # Link: /etc/init.d/postgresql --> /etc/init.d/postgresql-9.1
      os.symlink(glob.glob(get_pg_hba_init_files() + '*')[0], get_pg_hba_init_files())

    # Get postgres_data location (default: /var/lib/pgsql/data)
    cmd = "alias exit=return; source " + get_pg_hba_init_files() + " status &>/dev/null; echo $PGDATA"
    p = subprocess.Popen(cmd,
                         stdout=subprocess.PIPE,
                         stdin=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True)
    (PG_HBA_ROOT, err) = p.communicate()

    if PG_HBA_ROOT and len(PG_HBA_ROOT.strip()) > 0:
      return PG_HBA_ROOT.strip()
    else:
      return PG_HBA_ROOT_DEFAULT

def find_repo_files_for_repos(self, repoNames):
  repoFiles = []
  osType = OSCheck.get_os_family()
  repoNameList = []
  for repoName in repoNames:
    if len(repoName.strip()) > 0:
      repoNameList.append("[" + repoName + "]")
      repoNameList.append("name=" + repoName)

  if repoNameList:
    # get list of files
    if OSCheck.is_suse_family():
      fileList = self.get_files_in_dir(REPO_PATH_SUSE)
    elif OSCheck.is_redhat_family():
      fileList = self.get_files_in_dir(REPO_PATH_RHEL)
    elif OSCheck.is_ubuntu_family():
      fileList = self.get_files_in_dir(REPO_PATH_UBUNTU)
    else:
      logger.warn("Unsupported OS type, cannot get repository location.")
      return []

    if fileList:
      for filePath in fileList:
        with open(filePath, 'r') as file:
          content = file.readline()
          while content != "":
            for repoName in repoNameList:
              if content.find(repoName) == 0 and filePath not in repoFiles:
                repoFiles.append(filePath)
                break
            content = file.readline()

  return repoFiles

def getFirewallObject(self):
  if OSCheck.is_ubuntu_family():
    return UbuntuFirewallChecks()
  elif self.OS_TYPE == OSConst.OS_FEDORA and int(self.OS_VERSION) >= 18:
    return Fedora18FirewallChecks()
  elif OSCheck.is_suse_family():
    return SuseFirewallChecks()
  else:
    return FirewallChecks()

def get_pg_hba_init_files():
  if OSCheck.is_ubuntu_family():
    return '/etc/postgresql'
  elif OSCheck.is_redhat_family():
    return '/etc/rc.d/init.d/postgresql'
  elif OSCheck.is_suse_family():
    return '/etc/init.d/postgresql'
  else:
    raise Exception("Unsupported OS family '{0}'".format(OSCheck.get_os_family()))

def get_erase_cmd():
  if OSCheck.is_redhat_family():
    return "yum erase -y {0}"
  elif OSCheck.is_suse_family():
    return "zypper -n -q remove {0}"
  elif OSCheck.is_ubuntu_family():
    return "/usr/bin/apt-get -y -q remove {0}"
  else:
    raise Exception("Unsupported OS family '{0}', cannot remove package.".format(OSCheck.get_os_family()))

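# --- Usage sketch (not from the original source): get_erase_cmd() returns a format
# --- template, and the caller fills in the package name. Assuming a RHEL-family host:
erase_cmd = get_erase_cmd().format("ambari-agent")
# erase_cmd is now "yum erase -y ambari-agent"
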
def getRepoDir(self):
  if OSCheck.is_redhat_family():
    return "/etc/yum.repos.d"
  elif OSCheck.is_suse_family():
    return "/etc/zypp/repos.d"
  elif OSCheck.is_ubuntu_family():
    return "/etc/apt/sources.list.d"
  else:
    raise Exception("Unsupported OS family '{0}'".format(OSCheck.get_os_family()))

def isAgentPackageAlreadyInstalled(projectVersion):
  if OSCheck.is_ubuntu_family():
    Command = ["bash", "-c", "dpkg-query -W -f='${Status} ${Version}\n' ambari-agent | grep -v deinstall | grep " + projectVersion]
  else:
    Command = ["bash", "-c", "rpm -qa | grep ambari-agent-" + projectVersion]
  ret = execOsCommand(Command)
  res = False
  if ret["exitstatus"] == 0 and ret["log"][0].strip() != "":
    res = True
  return res

def installAgent(projectVersion):
  """ Run install and make sure the agent installs correctly """
  # The command doesn't work with the file mask ambari-agent*.rpm, so rename it on the agent host
  if OSCheck.is_suse_family():
    Command = ["zypper", "--no-gpg-checks", "install", "-y", "ambari-agent-" + projectVersion]
  elif OSCheck.is_ubuntu_family():
    # add * to the end of the version in case of some test releases
    Command = ["apt-get", "install", "-y", "--allow-unauthenticated", "ambari-agent=" + projectVersion + "*"]
  else:
    Command = ["yum", "-y", "install", "--nogpgcheck", "ambari-agent-" + projectVersion]
  return execOsCommand(Command, tries=3, try_sleep=10)

def getAvaliableAgentPackageVersions():
  if OSCheck.is_suse_family():
    Command = ["bash", "-c",
               "zypper --no-gpg-checks -q search -s --match-exact ambari-agent | grep ambari-agent | sed -re 's/\s+/ /g' | cut -d '|' -f 4 | tr '\\n' ', ' | sed -s 's/[-|~][A-Za-z0-9]*//g'"]
  elif OSCheck.is_ubuntu_family():
    Command = ["bash", "-c",
               "apt-cache -q show ambari-agent|grep 'Version\:'|cut -d ' ' -f 2| tr '\\n' ', '|sed -s 's/[-|~][A-Za-z0-9]*//g'"]
  else:
    Command = ["bash", "-c",
               "yum -q list all ambari-agent | grep -E '^ambari-agent' | sed -re 's/\s+/ /g' | cut -d ' ' -f 2 | tr '\\n' ', ' | sed -s 's/[-|~][A-Za-z0-9]*//g'"]
  return execOsCommand(Command)

def copyNeededFiles(self):
  # Copying the files
  fileToCopy = self.getRepoFile()
  target = self.getRemoteName(self.AMBARI_REPO_FILENAME)

  self.host_log.write("==========================\n")
  self.host_log.write("Copying repo file to 'tmp' folder...")
  params = self.shared_state
  scp = SCP(params.user, params.sshkey_file, self.host, fileToCopy,
            target, params.bootdir, self.host_log)
  retcode1 = scp.run()
  self.host_log.write("\n")

  # Move file to repo dir
  self.host_log.write("==========================\n")
  self.host_log.write("Moving file to repo dir...")
  targetDir = self.getRepoDir()
  command = self.getMoveRepoFileCommand(targetDir)
  ssh = SSH(params.user, params.sshkey_file, self.host, command,
            params.bootdir, self.host_log)
  retcode2 = ssh.run()
  self.host_log.write("\n")

  # Change permissions on ambari.repo
  self.host_log.write("==========================\n")
  self.host_log.write("Changing permissions for ambari.repo...")
  command = self.getRepoFileChmodCommand()
  ssh = SSH(params.user, params.sshkey_file, self.host, command,
            params.bootdir, self.host_log)
  retcode4 = ssh.run()
  self.host_log.write("\n")

  # Update repo cache for ubuntu OS
  if OSCheck.is_ubuntu_family():
    self.host_log.write("==========================\n")
    self.host_log.write("Update apt cache of repository...")
    command = self.getAptUpdateCommand()
    ssh = SSH(params.user, params.sshkey_file, self.host, command,
              params.bootdir, self.host_log)
    retcode2 = ssh.run()
    self.host_log.write("\n")

  self.host_log.write("==========================\n")
  self.host_log.write("Copying setup script file...")
  fileToCopy = params.setup_agent_file
  target = self.getRemoteName(self.SETUP_SCRIPT_FILENAME)
  scp = SCP(params.user, params.sshkey_file, self.host, fileToCopy,
            target, params.bootdir, self.host_log)
  retcode3 = scp.run()
  self.host_log.write("\n")

  return max(retcode1["exitstatus"], retcode2["exitstatus"],
             retcode3["exitstatus"], retcode4["exitstatus"])

def format_package_name(self, name):
  from resource_management.libraries.functions.default import default
  """
  This function replaces the ${stack_version} placeholder with the actual version.
  If the package version is passed from the server, use that as an absolute truth.
  """
  # two different command types put things in different objects. WHY.
  # package_version is of the form W_X_Y_Z_nnnn
  package_version = default("roleParams/package_version", None)
  if not package_version:
    package_version = default("hostLevelParams/package_version", None)

  package_delimiter = '-' if OSCheck.is_ubuntu_family() else '_'

  # The cluster effective version comes down when the version is known after the initial
  # install. In that case we should not be guessing which version when invoking INSTALL, but
  # use the supplied version to build the package_version.
  effective_version = default("commandParams/version", None)
  role_command = default("roleCommand", None)
  if (package_version is None or '*' in package_version) \
      and effective_version is not None and 'INSTALL' == role_command:
    package_version = effective_version.replace('.', package_delimiter).replace('-', package_delimiter)
    Logger.info("Version {0} was provided as effective cluster version. Using package version {1}".format(effective_version, package_version))

  if package_version:
    stack_version_package_formatted = package_version
    if OSCheck.is_ubuntu_family():
      stack_version_package_formatted = package_version.replace('_', package_delimiter)

  # Wildcards cause a lot of trouble with installing packages; if the version contains wildcards we try to specify it.
  if not package_version or '*' in package_version:
    stack_version_package_formatted = self.get_stack_version_before_packages_installed().replace('.', package_delimiter).replace('-', package_delimiter) if STACK_VERSION_PLACEHOLDER in name else name

  package_name = name.replace(STACK_VERSION_PLACEHOLDER, stack_version_package_formatted)

  return package_name

def installAgent(projectVersion, ret=None):
  """ Run install and make sure the agent installs correctly """
  # The command doesn't work with the file mask ambari-agent*.rpm, so rename it on the agent host
  if OSCheck.is_suse_family():
    Command = ["zypper", "--no-gpg-checks", "install", "-y", "ambari-agent-" + projectVersion]
  elif OSCheck.is_ubuntu_family():
    # add * to the end of the version in case of some test releases
    Command = ["apt-get", "install", "-y", "--allow-unauthenticated", "ambari-agent=" + projectVersion + "*"]
  elif OSCheck.is_windows_family():
    packageParams = "/AmbariRoot:" + AMBARI_INSTALL_ROOT
    Command = ["cmd", "/c", "choco", "install", "-y", "ambari-agent", "--version=" + projectVersion, "--params=\"" + packageParams + "\""]
  else:
    Command = ["yum", "-y", "install", "--nogpgcheck", "ambari-agent-" + projectVersion]
  return execOsCommand(Command, tries=3, try_sleep=10, ret=ret)

def get_ambari_repo_file_full_name():
  if OSCheck.is_ubuntu_family():
    ambari_repo_file = "/etc/apt/sources.list.d/ambari.list"
  elif OSCheck.is_redhat_family():
    ambari_repo_file = "/etc/yum.repos.d/ambari.repo"
  elif OSCheck.is_suse_family():
    ambari_repo_file = "/etc/zypp/repos.d/ambari.repo"
  elif OSCheck.is_windows_family():
    ambari_repo_file = os.path.join(os.environ[ChocolateyConsts.CHOCOLATEY_INSTALL_VAR_NAME],
                                    ChocolateyConsts.CHOCOLATEY_CONFIG_DIR,
                                    ChocolateyConsts.CHOCOLATEY_CONFIG_FILENAME)
  else:
    raise Exception('Ambari repo file path not set for current OS.')

  return ambari_repo_file

def getTransparentHugePage(self):
  thp_regex = "\[(.+)\]"
  file_name = None
  if OSCheck.is_ubuntu_family():
    file_name = self.THP_FILE_UBUNTU
  elif OSCheck.is_redhat_family():
    file_name = self.THP_FILE_REDHAT

  if file_name and os.path.isfile(file_name):
    with open(file_name) as f:
      file_content = f.read()
      return re.search(thp_regex, file_content).groups()[0]
  else:
    return ""

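# --- Illustrative aside (not part of the original source): the bracket regex above extracts
# --- the currently selected THP mode. The sample content below is an assumption about the
# --- usual one-line format of the transparent_hugepage "enabled" file:
import re

sample_thp_content = "always madvise [never]"  # hypothetical file content
print(re.search("\[(.+)\]", sample_thp_content).groups()[0])  # prints: never
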
def setup_atlas_hive(configuration_directory=None):
  import params

  if params.has_atlas:
    if configuration_directory is None:
      configuration_directory = format("{hive_config_dir}")

    if not params.host_sys_prepped:
      Package(params.atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else params.atlas_plugin_package,
              # FIXME HACK: install the package during RESTART/START when install_packages is not triggered.
              )

    PropertiesFile(format('{configuration_directory}/client.properties'),
                   properties = params.atlas_client_props,
                   owner = params.hive_user,
                   group = params.user_group,
                   mode = 0644)

def __init__(self, host, shared_state):
  threading.Thread.__init__(self)
  self.host = host
  self.shared_state = shared_state
  self.status = {
    "start_time": None,
    "return_code": None,
  }
  log_file = os.path.join(self.shared_state.bootdir, self.host + ".log")
  self.host_log = HostLog(log_file)
  self.daemon = True

  if OSCheck.is_ubuntu_family():
    self.AMBARI_REPO_FILENAME = self.AMBARI_REPO_FILENAME + ".list"
  else:
    self.AMBARI_REPO_FILENAME = self.AMBARI_REPO_FILENAME + ".repo"

def checkSudoPackage(self):
  """ Checking 'sudo' package on remote host """
  self.host_log.write("==========================\n")
  self.host_log.write("Checking 'sudo' package on remote host...")
  params = self.shared_state
  if OSCheck.is_ubuntu_family():
    command = "dpkg --get-selections|grep -e '^sudo\s*install'"
  else:
    command = "rpm -qa | grep -e '^sudo\-'"
  ssh = SSH(params.user, params.sshkey_file, self.host, command,
            params.bootdir, self.host_log,
            errorMessage="Error: Sudo command is not available. "
                         "Please install the sudo command.")
  retcode = ssh.run()
  self.host_log.write("\n")
  return retcode

def allAvailablePackages(allAvailablePackages):
  osType = OSCheck.get_os_family()

  if OSCheck.is_suse_family():
    return _lookUpZypperPackages(
      ["sudo", "zypper", "search", "--uninstalled-only", "--details"],
      allAvailablePackages)
  elif OSCheck.is_redhat_family():
    return _lookUpYumPackages(
      ["sudo", "yum", "list", "available"],
      'Available Packages',
      allAvailablePackages)
  elif OSCheck.is_ubuntu_family():
    return _lookUpAptPackages(
      LIST_AVAILABLE_PACKAGES_UBUNTU,
      allAvailablePackages)

def findNearestAgentPackageVersion(projectVersion):
  if projectVersion == "":
    projectVersion = " "
  if OSCheck.is_suse_family():
    Command = ["bash", "-c",
               "zypper --no-gpg-checks -q search -s --match-exact ambari-agent | grep '" + projectVersion + "' | cut -d '|' -f 4 | head -n1 | sed -e 's/-\w[^:]*//1' "]
  elif OSCheck.is_ubuntu_family():
    if projectVersion == " ":
      Command = ["bash", "-c",
                 "apt-cache -q show ambari-agent |grep 'Version\:'|cut -d ' ' -f 2|tr -d '\\n'|sed -s 's/[-|~][A-Za-z0-9]*//'"]
    else:
      Command = ["bash", "-c",
                 "apt-cache -q show ambari-agent |grep 'Version\:'|cut -d ' ' -f 2|grep '" + projectVersion + "'|tr -d '\\n'|sed -s 's/[-|~][A-Za-z0-9]*//'"]
  else:
    Command = ["bash", "-c",
               "yum -q list all ambari-agent | grep '" + projectVersion + "' | sed -re 's/\s+/ /g' | cut -d ' ' -f 2 | head -n1 | sed -e 's/-\w[^:]*//1' "]
  return execOsCommand(Command)

def allInstalledPackages(allInstalledPackages):
  """
  All installed packages in the system
  """
  osType = OSCheck.get_os_family()

  if OSCheck.is_suse_family():
    return _lookUpZypperPackages(
      ["sudo", "zypper", "search", "--installed-only", "--details"],
      allInstalledPackages)
  elif OSCheck.is_redhat_family():
    return _lookUpYumPackages(
      ["sudo", "yum", "list", "installed"],
      'Installed Packages',
      allInstalledPackages)
  elif OSCheck.is_ubuntu_family():
    return _lookUpAptPackages(
      LIST_INSTALLED_PACKAGES_UBUNTU,
      allInstalledPackages)

def _check_postgre_up():
  pg_status, retcode, out, err = PGConfig._get_postgre_status()
  if pg_status == PGConfig.PG_STATUS_RUNNING:
    print_info_msg("PostgreSQL is running")
    return pg_status, 0, out, err
  else:
    # run initdb only on non-ubuntu systems as ubuntu does not have the initdb cmd.
    if not OSCheck.is_ubuntu_family():
      print "Running initdb: This may take up to a minute."
      retcode, out, err = run_os_command(PGConfig.PG_INITDB_CMD)
      if retcode == 0:
        print out
    print "About to start PostgreSQL"
    try:
      process = subprocess.Popen(PGConfig.PG_START_CMD.split(' '),
                                 stdout=subprocess.PIPE,
                                 stdin=subprocess.PIPE,
                                 stderr=subprocess.PIPE
                                 )
      if OSCheck.is_suse_family():
        time.sleep(20)
        result = process.poll()
        print_info_msg("Result of postgres start cmd: " + str(result))
        if result is None:
          process.kill()
          pg_status, retcode, out, err = PGConfig._get_postgre_status()
        else:
          retcode = result
      else:
        out, err = process.communicate()
        retcode = process.returncode

      pg_status, retcode, out, err = PGConfig._get_postgre_status()
      if pg_status == PGConfig.PG_STATUS_RUNNING:
        print_info_msg("Postgres process is running. Returning...")
        return pg_status, 0, out, err
    except (Exception), e:
      pg_status, retcode, out, err = PGConfig._get_postgre_status()
      if pg_status == PGConfig.PG_STATUS_RUNNING:
        return pg_status, 0, out, err
      else:
        print_error_msg("Postgres start failed. " + str(e))

    return pg_status, retcode, out, err

def installAgent(projectVersion):
  """ Run install and make sure the agent installs correctly """
  # The command doesn't work with the file mask ambari-agent*.rpm, so rename it on the agent host
  if OSCheck.is_suse_family():
    Command = ["zypper", "--no-gpg-checks", "install", "-y", "ambari-agent-" + projectVersion]
  elif OSCheck.is_ubuntu_family():
    # add * to the end of the version in case of some test releases
    Command = ["apt-get", "install", "-y", "--allow-unauthenticated", "ambari-agent=" + projectVersion + "*"]
  else:
    Command = ["yum", "-y", "install", "--nogpgcheck", "ambari-agent-" + projectVersion]
  return execOsCommand(Command, tries=3, try_sleep=10)

def findNearestAgentPackageVersion(projectVersion):
  if projectVersion == "":
    projectVersion = " "
  if OSCheck.is_suse_family():
    Command = ["bash", "-c",
               "zypper --no-gpg-checks -q search -s --match-exact ambari-agent | grep '" + projectVersion + "' | cut -d '|' -f 4 | head -n1 | sed -e 's/-\w[^:]*//1' "]
  elif OSCheck.is_windows_family():
    listPackagesCommand = ["cmd", "/c",
                           "choco list ambari-agent --pre --all | findstr " + projectVersion + " > agentPackages.list"]
    execOsCommand(listPackagesCommand)
    Command = ["cmd", "/c", "powershell",
               "get-content agentPackages.list | select-object -last 1 | foreach-object {$_ -replace 'ambari-agent ', ''}"]
  elif OSCheck.is_ubuntu_family():
    if projectVersion == " ":
      Command = ["bash", "-c",
                 "apt-cache -q show ambari-agent |grep 'Version\:'|cut -d ' ' -f 2|tr -d '\\n'|sed -s 's/[-|~][A-Za-z0-9]*//'"]
    else:
      Command = ["bash", "-c",
                 "apt-cache -q show ambari-agent |grep 'Version\:'|cut -d ' ' -f 2|grep '" + projectVersion + "'|tr -d '\\n'|sed -s 's/[-|~][A-Za-z0-9]*//'"]
  else:
    Command = ["bash", "-c",
               "yum -q list all ambari-agent | grep '" + projectVersion + "' | sed -re 's/\s+/ /g' | cut -d ' ' -f 2 | head -n1 | sed -e 's/-\w[^:]*//1' "]
  return execOsCommand(Command)

def setup_atlas_hive(configuration_directory=None):
  import params

  if params.has_atlas:
    if configuration_directory is None:
      configuration_directory = format("{hive_config_dir}")

    if not params.host_sys_prepped:
      Package(params.atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else params.atlas_plugin_package,
              # FIXME HACK: install the package during RESTART/START when install_packages is not triggered.
              retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
              retry_count=params.agent_stack_retry_count)

    PropertiesFile(format('{configuration_directory}/{atlas_conf_file}'),
                   properties=params.atlas_props,
                   owner=params.hive_user,
                   group=params.user_group,
                   mode=0644)

def isAgentPackageAlreadyInstalled(projectVersion):
  if OSCheck.is_ubuntu_family():
    Command = ["bash", "-c",
               "dpkg-query -W -f='${Status} ${Version}\n' ambari-agent | grep -v deinstall | grep " + projectVersion]
  elif OSCheck.is_windows_family():
    Command = ["cmd", "/c", "choco list ambari-agent --local-only | findstr ambari-agent"]
  else:
    Command = ["bash", "-c", "rpm -qa | grep ambari-agent-" + projectVersion]
  ret = execOsCommand(Command)
  res = False
  if ret["exitstatus"] == 0 and ret["log"][0].strip() != "":
    res = True
  return res

def findNearestAgentPackageVersion(projectVersion):
  if projectVersion == "":
    projectVersion = " "
  if OSCheck.is_suse_family():
    Command = ["bash", "-c",
               "zypper --no-gpg-checks --non-interactive -q search -s --match-exact ambari-agent | grep '" + projectVersion + "' | cut -d '|' -f 4 | head -n1 | sed -e 's/-\w[^:]*//1' "]
  elif OSCheck.is_windows_family():
    listPackagesCommand = ["cmd", "/c",
                           "choco list ambari-agent --pre --all | findstr " + projectVersion + " > agentPackages.list"]
    execOsCommand(listPackagesCommand)
    Command = ["cmd", "/c", "powershell",
               "get-content agentPackages.list | select-object -last 1 | foreach-object {$_ -replace 'ambari-agent ', ''}"]
  elif OSCheck.is_ubuntu_family():
    if projectVersion == " ":
      Command = ["bash", "-c",
                 "apt-cache -q show ambari-agent |grep 'Version\:'|cut -d ' ' -f 2|tr -d '\\n'|sed -s 's/[-|~][A-Za-z0-9]*//'"]
    else:
      Command = ["bash", "-c",
                 "apt-cache -q show ambari-agent |grep 'Version\:'|cut -d ' ' -f 2|grep '" + projectVersion + "'|tr -d '\\n'|sed -s 's/[-|~][A-Za-z0-9]*//'"]
  else:
    Command = ["bash", "-c",
               "yum -q list all ambari-agent | grep '" + projectVersion + "' | sed -re 's/\s+/ /g' | cut -d ' ' -f 2 | head -n1 | sed -e 's/-\w[^:]*//1' "]
  return execOsCommand(Command)

def get_postgre_hba_dir(OS_FAMILY):
  """Return the postgres hba dir location, depending on the OS.
  Also, depending on the postgres version, creates symlinks like postgresql-->postgresql-9.3:
  1) /etc/rc.d/init.d/postgresql --> /etc/rc.d/init.d/postgresql-9.3
  2) /etc/init.d/postgresql --> /etc/init.d/postgresql-9.1
  """
  if OSCheck.is_ubuntu_family():
    # Like: /etc/postgresql/9.1/main/
    return os.path.join(get_pg_hba_init_files(), get_ubuntu_pg_version(), "main")
  elif glob.glob(get_pg_hba_init_files() + '*'):
    # this happens when the service file is of the old format (not like /usr/lib/systemd/system/postgresql.service)
    if not os.path.isfile(get_pg_hba_init_files()):
      # Link: /etc/init.d/postgresql --> /etc/init.d/postgresql-9.1
      os.symlink(glob.glob(get_pg_hba_init_files() + '*')[0], get_pg_hba_init_files())

    pg_hba_init_basename = os.path.basename(get_pg_hba_init_files())
    # Get postgres_data location (default: /var/lib/pgsql/data)
    cmd = "alias basename='echo {0}; true' ; alias exit=return; source {1} status &>/dev/null; echo $PGDATA".format(
      pg_hba_init_basename, get_pg_hba_init_files())
    p = subprocess32.Popen(cmd,
                           stdout=subprocess32.PIPE,
                           stdin=subprocess32.PIPE,
                           stderr=subprocess32.PIPE,
                           shell=True)
    (PG_HBA_ROOT, err) = p.communicate()

    if PG_HBA_ROOT and len(PG_HBA_ROOT.strip()) > 0:
      return PG_HBA_ROOT.strip()

  if not os.path.exists(PG_HBA_ROOT_DEFAULT):
    versioned_dirs = glob.glob(PG_HBA_ROOT_DEFAULT_VERSIONED)
    if versioned_dirs:
      return versioned_dirs[0]

  return PG_HBA_ROOT_DEFAULT

def getAvailableAgentPackageVersions():
  if OSCheck.is_suse_family():
    Command = ["bash", "-c",
               "zypper --no-gpg-checks --non-interactive -q search -s --match-exact ambari-agent | grep ambari-agent | sed -re 's/\s+/ /g' | cut -d '|' -f 4 | tr '\\n' ', ' | sed -s 's/[-|~][A-Za-z0-9]*//g'"]
  elif OSCheck.is_windows_family():
    Command = ["cmd", "/c", "choco list ambari-agent --pre --all | findstr ambari-agent"]
  elif OSCheck.is_ubuntu_family():
    Command = ["bash", "-c",
               "apt-cache -q show ambari-agent|grep 'Version\:'|cut -d ' ' -f 2| tr '\\n' ', '|sed -s 's/[-|~][A-Za-z0-9]*//g'"]
  else:
    Command = ["bash", "-c",
               "yum -q list all ambari-agent | grep -E '^ambari-agent' | sed -re 's/\s+/ /g' | cut -d ' ' -f 2 | tr '\\n' ', ' | sed -s 's/[-|~][A-Za-z0-9]*//g'"]
  return execOsCommand(Command)

def verifyDependencies():
  """
  Verify that there are no dependency issues in the package manager. Dependency issues can appear
  after an aborted or terminated package installation, or after manual modification of the
  package list on the host leaves packages in an invalid state.

  :return True if no dependency issues found, False if a dependency issue is present
  :rtype bool
  """
  check_str = None
  cmd = None

  if OSCheck.is_redhat_family():
    cmd = ['/usr/bin/yum', '-d', '0', '-e', '0', 'check', 'dependencies']
    check_str = "has missing requires|Error:"
  elif OSCheck.is_suse_family():
    cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive', 'verify', '--dry-run']
    check_str = "\d+ new package(s)? to install"
  elif OSCheck.is_ubuntu_family():
    cmd = ['/usr/bin/apt-get', '-qq', 'check']
    check_str = "has missing dependency|E:"

  if check_str is None or cmd is None:
    raise Fail("Unsupported OSFamily on the Agent Host")

  code, out = rmf_shell.checked_call(cmd, sudo=True)
  output_regex = re.compile(check_str)

  if code or (out and output_regex.search(out)):
    err_msg = Logger.filter_text(
      "Failed to verify package dependencies. Execution of '%s' returned %s. %s" % (cmd, code, out))
    Logger.error(err_msg)
    return False

  return True

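# --- Hypothetical call site (not from the original source), sketching how the boolean result
# --- is typically consumed before attempting package operations on the host:
if not verifyDependencies():
  raise Fail("Package manager reported unresolved dependencies on this host; aborting.")
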
def get_available_packages_in_repos(repositories):
  """
  Gets all (both installed and available) packages that are available at given repositories.
  :param repositories: from command configs like config['repositoryFile']['repositories']
  :return: installed and available packages from these repositories
  """
  available_packages = []
  installed_packages = []
  available_packages_in_repos = []
  repo_ids = [repository['repoId'] for repository in repositories]

  if OSCheck.is_ubuntu_family():
    allInstalledPackages(installed_packages)
    repo_urls = [repository['baseUrl'] for repository in repositories]
    repo_urls = [repo_url.replace("http://", "") for repo_url in repo_urls]
    repo_urls = [repo_url.replace("/", "_") for repo_url in repo_urls]
    for url in repo_urls:
      _lookUpAptPackages(
        LIST_AVAILABLE_PACKAGES_UBUNTU % url,
        available_packages)
      for package in installed_packages:
        if url in package[2]:
          available_packages_in_repos.append(package[0])
    for package in available_packages:
      available_packages_in_repos.append(package[0])
  elif OSCheck.is_suse_family():
    for repo in repo_ids:
      _lookUpZypperPackages(["sudo", "zypper", "--no-gpg-checks", "search", "--details", "--repo", repo],
                            available_packages)
    available_packages_in_repos += [package[0] for package in available_packages]
  elif OSCheck.is_redhat_family():
    for repo in repo_ids:
      _lookUpYumPackages(["sudo", "yum", "list", "available", "--disablerepo=*", "--enablerepo=" + repo],
                         'Available Packages', available_packages)
      _lookUpYumPackages(["sudo", "yum", "list", "installed", "--disablerepo=*", "--enablerepo=" + repo],
                         'Installed Packages', installed_packages)
    available_packages_in_repos += [package[0] for package in available_packages + installed_packages]

  return available_packages_in_repos

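# --- Illustrative input sketch (repo IDs and URLs below are hypothetical, not from the original
# --- source). Only the 'repoId' and 'baseUrl' keys are read by get_available_packages_in_repos():
repositories = [
  {"repoId": "HDP-2.6-repo-1", "baseUrl": "http://repo.example.com/HDP/centos7/2.6.0.0"},
  {"repoId": "HDP-UTILS-repo-1", "baseUrl": "http://repo.example.com/HDP-UTILS/centos7"},
]
packages_in_repos = get_available_packages_in_repos(repositories)
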
def setup_atlas_sqoop():
  import params

  if params.has_atlas:
    if not params.host_sys_prepped:
      Package(params.atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else params.atlas_plugin_package,
              retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
              retry_count=params.agent_stack_retry_count)

    PropertiesFile(format('{sqoop_conf_dir}/{atlas_conf_file}'),
                   properties = params.atlas_props,
                   owner = params.sqoop_user,
                   group = params.user_group,
                   mode = 0644)

    atlas_sqoop_hook_dir = os.path.join(params.atlas_home_dir, "hook", "sqoop")
    if os.path.exists(atlas_sqoop_hook_dir):
      src_files = os.listdir(atlas_sqoop_hook_dir)
      for file_name in src_files:
        atlas_sqoop_hook_file_name = os.path.join(atlas_sqoop_hook_dir, file_name)
        sqoop_lib_file_name = os.path.join(params.sqoop_lib, file_name)
        if os.path.isfile(atlas_sqoop_hook_file_name):
          Link(sqoop_lib_file_name, to = atlas_sqoop_hook_file_name)

if OSCheck.is_windows_family():
  hive_metastore_win_service_name = "metastore"
  hive_client_win_service_name = "hwi"
  hive_server_win_service_name = "hiveserver2"
  webhcat_server_win_service_name = "templeton"
else:
  hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
  hive_pid = 'hive-server.pid'
  hive_interactive_pid = 'hive-interactive.pid'
  hive_metastore_pid = 'hive.pid'

  hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']  #hcat_pid_dir
  webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')

  process_name = 'mysqld'
  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    daemon_name = 'mysql'
  else:
    daemon_name = 'mysqld'

  # Security related/required params
  hostname = config['hostname']
  security_enabled = config['configurations']['cluster-env']['security_enabled']
  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
  tmp_dir = Script.get_tmp_dir()
  hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
  hive_user = config['configurations']['hive-env']['hive_user']
  webhcat_user = config['configurations']['hive-env']['webhcat_user']

def get_ntp_service():
  if OSCheck.is_redhat_family():
    return "ntpd"
  elif OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    return "ntp"

def install_atlas_hook_packages(atlas_plugin_package, atlas_ubuntu_plugin_package, host_sys_prepped,
                                agent_stack_retry_on_unavailability, agent_stack_retry_count):
  if not host_sys_prepped:
    Package(atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else atlas_plugin_package,
            retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
            retry_count=agent_stack_retry_count)

def copyNeededFiles(self):
  # get the params
  params = self.shared_state

  # Copying the files
  fileToCopy = self.getRepoFile()
  target = self.getRemoteName(self.AMBARI_REPO_FILENAME)

  if os.path.exists(fileToCopy):
    self.host_log.write("==========================\n")
    self.host_log.write("Copying repo file to 'tmp' folder...")
    scp = SCP(params.user, params.sshkey_file, self.host, fileToCopy,
              target, params.bootdir, self.host_log)
    retcode1 = scp.run()
    self.host_log.write("\n")

    # Move file to repo dir
    self.host_log.write("==========================\n")
    self.host_log.write("Moving file to repo dir...")
    targetDir = self.getRepoDir()
    command = self.getMoveRepoFileCommand(targetDir)
    ssh = SSH(params.user, params.sshkey_file, self.host, command,
              params.bootdir, self.host_log)
    retcode2 = ssh.run()
    self.host_log.write("\n")

    # Change permissions on ambari.repo
    self.host_log.write("==========================\n")
    self.host_log.write("Changing permissions for ambari.repo...")
    command = self.getRepoFileChmodCommand()
    ssh = SSH(params.user, params.sshkey_file, self.host, command,
              params.bootdir, self.host_log)
    retcode4 = ssh.run()
    self.host_log.write("\n")

    # Update repo cache for ubuntu OS
    if OSCheck.is_ubuntu_family():
      self.host_log.write("==========================\n")
      self.host_log.write("Update apt cache of repository...")
      command = self.getAptUpdateCommand()
      ssh = SSH(params.user, params.sshkey_file, self.host, command,
                params.bootdir, self.host_log)
      retcode2 = ssh.run()
      self.host_log.write("\n")

    retcode = max(retcode1["exitstatus"], retcode2["exitstatus"], retcode4["exitstatus"])
  else:
    self.host_log.write("==========================\n")
    self.host_log.write("Copying required files...")
    self.host_log.write("Ambari repo file not found: {0}".format(self.getRepoFile()))
    retcode = -1

  # copy the setup script file
  self.host_log.write("==========================\n")
  self.host_log.write("Copying setup script file...")
  fileToCopy = params.setup_agent_file
  target = self.getRemoteName(self.SETUP_SCRIPT_FILENAME)
  scp = SCP(params.user, params.sshkey_file, self.host, fileToCopy,
            target, params.bootdir, self.host_log)
  retcode3 = scp.run()
  self.host_log.write("\n")

  return max(retcode, retcode3["exitstatus"])

def get_postgre_running_status():
  """Return the postgres running-status indicator"""
  if OSCheck.is_ubuntu_family():
    return os.path.join(get_ubuntu_pg_version(), "main")
  else:
    return PG_STATUS_RUNNING_DEFAULT

def format_package_name(self, name, repo_version=None):
  from resource_management.libraries.functions.default import default
  """
  This function replaces the ${stack_version} placeholder with the actual version.
  If the package version is passed from the server, use that as an absolute truth.

  :param name name of the package
  :param repo_version actual version of the repo currently installing
  """
  if not STACK_VERSION_PLACEHOLDER in name:
    return name

  stack_version_package_formatted = ""

  package_delimiter = '-' if OSCheck.is_ubuntu_family() else '_'

  # repositoryFile is the truth
  # package_version should be made to the form W_X_Y_Z_nnnn
  package_version = default("repositoryFile/repoVersion", None)

  # TODO remove legacy checks
  if package_version is None:
    package_version = default("roleParams/package_version", None)

  # TODO remove legacy checks
  if package_version is None:
    package_version = default("hostLevelParams/package_version", None)

  package_version = None
  if (package_version is None or '-' not in package_version) and default('/repositoryFile', None):
    self.load_available_packages()
    package_name = self.get_package_from_available(name, self.available_packages_in_repos)
    if package_name is None:
      raise Fail("Cannot match package for regexp name {0}. Available packages: {1}".format(name, self.available_packages_in_repos))
    return package_name

  if package_version is not None:
    package_version = package_version.replace('.', package_delimiter).replace('-', package_delimiter)

  # The cluster effective version comes down when the version is known after the initial
  # install. In that case we should not be guessing which version when invoking INSTALL, but
  # use the supplied version to build the package_version.
  effective_version = default("commandParams/version", None)
  role_command = default("roleCommand", None)
  if (package_version is None or '*' in package_version) \
      and effective_version is not None and 'INSTALL' == role_command:
    package_version = effective_version.replace('.', package_delimiter).replace('-', package_delimiter)
    Logger.info("Version {0} was provided as effective cluster version. Using package version {1}".format(effective_version, package_version))

  if package_version:
    stack_version_package_formatted = package_version
    if OSCheck.is_ubuntu_family():
      stack_version_package_formatted = package_version.replace('_', package_delimiter)

  # Wildcards cause a lot of trouble with installing packages; if the version contains wildcards we try to specify it.
  if not package_version or '*' in package_version:
    repo_version = self.get_stack_version_before_packages_installed()
    stack_version_package_formatted = repo_version.replace('.', package_delimiter).replace('-', package_delimiter) if STACK_VERSION_PLACEHOLDER in name else name

  package_name = name.replace(STACK_VERSION_PLACEHOLDER, stack_version_package_formatted)

  return package_name

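# --- Standalone sketch (not from the original source) of the ${stack_version} substitution
# --- performed by format_package_name(). It assumes STACK_VERSION_PLACEHOLDER is the literal
# --- string "${stack_version}"; the package name and version below are illustrative only.
def _substitute_stack_version_example(name, repo_version, is_ubuntu=False):
  placeholder = "${stack_version}"
  delimiter = '-' if is_ubuntu else '_'  # Ubuntu packages delimit version fields with '-', others with '_'
  formatted = repo_version.replace('.', delimiter).replace('-', delimiter)
  return name.replace(placeholder, formatted)

print(_substitute_stack_version_example("hadoop_${stack_version}-client", "2.6.0.0-1234"))
# -> hadoop_2_6_0_0_1234-client
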
if OSCheck.is_windows_family():
  hive_metastore_win_service_name = "metastore"
  hive_client_win_service_name = "hwi"
  hive_server_win_service_name = "hiveserver2"
  webhcat_server_win_service_name = "templeton"
else:
  hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
  hive_pid = 'hive-server.pid'
  hive_metastore_pid = 'hive.pid'

  hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']  #hcat_pid_dir
  webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')

  process_name = 'mysqld'
  if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
    daemon_name = 'mysql'
  else:
    daemon_name = 'mysqld'

  # Security related/required params
  hostname = config['hostname']
  security_enabled = config['configurations']['cluster-env']['security_enabled']
  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
  tmp_dir = Script.get_tmp_dir()
  hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
  hive_user = config['configurations']['hive-env']['hive_user']
  webhcat_user = config['configurations']['hive-env']['webhcat_user']

  # default configuration directories
  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()