def actionexecute(self, env):
    num_errors = 0

    # Parse parameters
    config = Script.get_config()

    try:
        command_repository = CommandRepository(config['repositoryFile'])
    except KeyError:
        raise Fail("The command repository indicated by 'repositoryFile' was not found")

    repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
    repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
    template = repo_rhel_suse if OSCheck.is_redhat_family() or OSCheck.is_suse_family() else repo_ubuntu

    # Handle a SIGTERM and SIGINT gracefully
    signal.signal(signal.SIGTERM, self.abort_handler)
    signal.signal(signal.SIGINT, self.abort_handler)

    self.repository_version = command_repository.version_string

    # Select dict that contains parameters
    try:
        package_list = json.loads(config['roleParams']['package_list'])
        stack_id = config['roleParams']['stack_id']
    except KeyError:
        pass

    self.stack_name = Script.get_stack_name()
    if self.stack_name is None:
        raise Fail("Cannot determine the stack name")

    self.stack_root_folder = Script.get_stack_root()
    if self.stack_root_folder is None:
        raise Fail("Cannot determine the stack's root directory")

    if self.repository_version is None:
        raise Fail("Cannot determine the repository version to install")

    self.repository_version = self.repository_version.strip()

    try:
        if 0 == len(command_repository.repositories):
            Logger.warning("Repository list is empty. Ambari may not be managing the repositories for {0}.".format(
                self.repository_version))
        else:
            Logger.info("Will install packages for repository version {0}".format(self.repository_version))
            create_repo_files(template, command_repository)
    except Exception as err:
        Logger.logger.exception("Cannot install repository files. Error: {0}".format(str(err)))
        num_errors += 1
def execute(configurations={}, parameters={}, host_name=None):
    """
    Checks if the stack selector such as hdp-select can find versions installed on this host. E.g.,
    hdp-select versions
    Returns a tuple containing the result code and a pre-formatted result label.

    Keyword arguments:
    configurations (dictionary): a mapping of configuration key to value
    parameters (dictionary): a mapping of script parameter key to value
    host_name (string): the name of this host where the alert is running
    """
    msg = []
    try:
        if configurations is None:
            return (RESULT_STATE_UNKNOWN, ['There were no configurations supplied to the script.'])

        # Check required properties
        if STACK_TOOLS not in configurations:
            return (RESULT_STATE_UNKNOWN, ['{0} is a required parameter for the script'.format(STACK_TOOLS)])

        stack_name = Script.get_stack_name()

        # Of the form,
        # { "HDP" : { "stack_selector": ["hdp-select", "/usr/bin/hdp-select", "hdp-select"],
        #             "conf_selector": ["conf-select", "/usr/bin/conf-select", "conf-select"] } }
        stack_tools_str = configurations[STACK_TOOLS]

        if stack_tools_str is None:
            return (RESULT_STATE_UNKNOWN, ['{0} is a required parameter for the script and the value is null'.format(STACK_TOOLS)])

        distro_select = "unknown-distro-select"

        try:
            stack_tools = json.loads(stack_tools_str)
            stack_tools = stack_tools[stack_name]
            distro_select = stack_tools["stack_selector"][0]
        except:
            pass

        # This may not exist if the host does not contain any stack components,
        # or only contains components like Ambari Metrics and SmartSense
        stack_root_dir = Script.get_stack_root()

        if os.path.isdir(stack_root_dir):
            (code, out, versions) = unsafe_get_stack_versions()

            if code == 0:
                msg.append("{0} ".format(distro_select))
                if versions is not None and type(versions) is list and len(versions) > 0:
                    msg.append("reported the following versions: {0}".format(", ".join(versions)))
                return (RESULT_STATE_OK, ["\n".join(msg)])
            else:
                msg.append("{0} could not properly read {1}. Check this directory for unexpected contents.".format(
                    distro_select, stack_root_dir))
                if out is not None:
                    msg.append(out)
                return (RESULT_STATE_CRITICAL, ["\n".join(msg)])
        else:
            msg.append("No stack root {0} to check.".format(stack_root_dir))
            return (RESULT_STATE_OK, ["\n".join(msg)])
    except Exception as e:
        return (RESULT_STATE_CRITICAL, [str(e)])
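# A minimal sketch (not part of the original alert module) showing how the execute() entry
# point above could be exercised locally. The sample JSON value mirrors the structure shown
# in the in-function comment; the STACK_TOOLS constant and result-state constants are assumed
# to be defined at the top of the alert script, and the sample data is illustrative only.
if __name__ == "__main__":
    sample_configurations = {
        STACK_TOOLS: json.dumps({
            "HDP": {
                "stack_selector": ["hdp-select", "/usr/bin/hdp-select", "hdp-select"],
                "conf_selector": ["conf-select", "/usr/bin/conf-select", "conf-select"]
            }
        })
    }
    result_code, result_labels = execute(configurations=sample_configurations)
    print("{0}: {1}".format(result_code, result_labels[0]))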
def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
    """
    For a given tarball name, get the source and destination paths to use.
    :param name: Tarball name
    :param use_upgrading_version_during_upgrade:
    :param custom_source_file: If specified, use this source path instead of the default one from the map.
    :param custom_dest_file: If specified, use this destination path instead of the default one from the map.
    :return: A tuple of (success status, source path, destination path)
    """
    stack_name = Script.get_stack_name()
    if not stack_name:
        Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))
        return (False, None, None)

    stack_version = get_current_version(use_upgrading_version_during_upgrade)
    if not stack_version:
        Logger.error("Cannot copy {0} tarball to HDFS because stack version could not be determined.".format(str(name)))
        return (False, None, None)

    stack_root = Script.get_stack_root()
    if not stack_root:
        Logger.error("Cannot copy {0} tarball to HDFS because stack root could not be determined.".format(str(name)))
        return (False, None, None)

    if name is None or name.lower() not in TARBALL_MAP:
        Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(
            str(name), str(stack_name)))
        return (False, None, None)

    (source_file, dest_file) = TARBALL_MAP[name.lower()]

    if custom_source_file is not None:
        source_file = custom_source_file

    if custom_dest_file is not None:
        dest_file = custom_dest_file

    source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower())
    dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower())

    source_file = source_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
    dest_file = dest_file.replace(STACK_ROOT_PATTERN, stack_root.lower())

    source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
    dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)

    return (True, source_file, dest_file)
def _get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
    stack_name = Script.get_stack_name()
    if not stack_name:
        Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))
        return (False, None, None)

    stack_version = _get_current_version(use_upgrading_version_during_upgrade)
    if not stack_version:
        Logger.error("Cannot copy {0} tarball to HDFS because stack version could not be determined.".format(str(name)))
        return (False, None, None)

    stack_root = Script.get_stack_root()
    if not stack_root:
        Logger.error("Cannot copy {0} tarball to HDFS because stack root could not be determined.".format(str(name)))
        return (False, None, None)

    tarball_map = _get_tarball_map()
    if not tarball_map:
        Logger.error("Cannot copy {0} tarball to HDFS because tarball map could not be determined.".format(str(name)))
        return (False, None, None)

    if name is None or name.lower() not in tarball_map:
        Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(
            str(name), str(stack_name)))
        return (False, None, None)

    (source_file, dest_file) = tarball_map[name.lower()]

    if custom_source_file is not None:
        source_file = custom_source_file

    if custom_dest_file is not None:
        dest_file = custom_dest_file

    source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower())
    dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower())

    source_file = source_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
    dest_file = dest_file.replace(STACK_ROOT_PATTERN, stack_root.lower())

    source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
    dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)

    return (True, source_file, dest_file)
def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
    """
    For a given tarball name, get the source and destination paths to use.
    :param name: Tarball name
    :param use_upgrading_version_during_upgrade:
    :param custom_source_file: If specified, use this source path instead of the default one from the map.
    :param custom_dest_file: If specified, use this destination path instead of the default one from the map.
    :return: A tuple of (success status, source path, destination path, optional preparation function
             which is invoked to set up the tarball)
    """
    stack_name = Script.get_stack_name()

    try:
        if not stack_name:
            raise ValueError("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))

        if name is None or name.lower() not in TARBALL_MAP:
            raise ValueError("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(
                str(name), str(stack_name)))

        service = TARBALL_MAP[name.lower()]
        service_name = service['service']

        stack_version = get_current_version(service=service_name,
                                            use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade)
        stack_root = Script.get_stack_root()
        if not stack_version or not stack_root:
            raise ValueError("Cannot copy {0} tarball to HDFS because the stack version or stack root could not be determined.".format(str(name)))

        source_file, dest_file = service['dirs']

        if custom_source_file is not None:
            source_file = custom_source_file

        if custom_dest_file is not None:
            dest_file = custom_dest_file

        source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower())
        dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower())

        source_file = source_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
        dest_file = dest_file.replace(STACK_ROOT_PATTERN, stack_root.lower())

        source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
        dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)

        prepare_function = service['prepare_function'] if "prepare_function" in service else None
    except ValueError as e:
        Logger.error(str(e))
        return False, None, None, None

    return True, source_file, dest_file, prepare_function
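# A minimal sketch (not part of the original module) for the 4-tuple variant above. It only
# unpacks the return value; how the optional prepare_function is invoked (its arguments and
# return value) is not defined here and is left to the caller. The tarball name "hive" is
# illustrative only - valid names are the keys of TARBALL_MAP.
success, source_file, dest_file, prepare_function = get_tarball_paths("hive")
if success:
    Logger.info("Resolved {0} -> {1} (prepare step {2}required)".format(
        source_file, dest_file, "" if prepare_function else "not "))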
def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
    """
    For a given tarball name, get the source and destination paths to use.
    :param name: Tarball name
    :param use_upgrading_version_during_upgrade:
    :param custom_source_file: If specified, use this source path instead of the default one from the map.
    :param custom_dest_file: If specified, use this destination path instead of the default one from the map.
    :return: A tuple of (success status, source path, destination path)
    """
    stack_name = Script.get_stack_name()
    if not stack_name:
        Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))
        return (False, None, None)

    stack_version = get_current_version(use_upgrading_version_during_upgrade)
    if not stack_version:
        Logger.error("Cannot copy {0} tarball to HDFS because stack version could not be determined.".format(str(name)))
        return (False, None, None)

    stack_root = Script.get_stack_root()
    if not stack_root:
        Logger.error("Cannot copy {0} tarball to HDFS because stack root could not be determined.".format(str(name)))
        return (False, None, None)

    if name is None or name.lower() not in TARBALL_MAP:
        Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(
            str(name), str(stack_name)))
        return (False, None, None)

    (source_file, dest_file) = TARBALL_MAP[name.lower()]

    if custom_source_file is not None:
        source_file = custom_source_file

    if custom_dest_file is not None:
        dest_file = custom_dest_file

    source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower())
    dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower())

    source_file = source_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
    dest_file = dest_file.replace(STACK_ROOT_PATTERN, stack_root.lower())

    source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
    dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)

    return (True, source_file, dest_file)
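# A minimal sketch (not part of the original module) showing how a caller might consume the
# (success, source, dest) tuple returned by the 3-tuple get_tarball_paths() variant above.
# The tarball name "tez" is only an example; valid names are the keys of TARBALL_MAP.
success, source_file, dest_file = get_tarball_paths("tez")
if success:
    Logger.info("Would copy {0} to {1} in HDFS".format(source_file, dest_file))
else:
    Logger.error("Tarball paths could not be resolved; skipping the copy.")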
def convert_conf_directories_to_symlinks(package, version, dirs):
    """
    Reverses the symlinks created by the package installer and invokes the conf-select tool to
    create versioned configuration directories for the given package. If the package does not
    exist, then no work is performed.

    - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
    - Creates a /etc/<component>/conf.backup directory, if needed
    - Copies all configs from /etc/<component>/conf to conf.backup, if needed
    - Removes /etc/<component>/conf, if needed
    - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
    - Links /etc/<component>/conf -> <stack-root>/current/[component]-client/conf

    :param package: the package to create symlinks for (zookeeper, falcon, etc)
    :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
    :param dirs: the directories associated with the package (from get_package_dirs())
    """
    # if the conf_dir doesn't exist, then that indicates that the package's service is not installed
    # on this host and nothing should be done with conf symlinks
    stack_name = Script.get_stack_name()
    for directory_struct in dirs:
        if not os.path.exists(directory_struct['conf_dir']):
            Logger.info("Skipping the conf-select tool on {0} since {1} does not exist.".format(
                package, directory_struct['conf_dir']))
            return

    # determine which directories would be created, if any are needed
    dry_run_directory = create(stack_name, package, version, dry_run=True)

    need_dirs = []
    for d in dry_run_directory:
        if not os.path.exists(d):
            need_dirs.append(d)

    # log that we'll actually be creating some directories soon
    if len(need_dirs) > 0:
        Logger.info("Package {0} will have the following new configuration directories created: {1}".format(
            package, ", ".join(dry_run_directory)))

    # Create the versioned /etc/[component]/[version]/0 folder (using create-conf-dir) and then
    # set it for the installed component:
    # - Creates /etc/<component>/<version>/0
    # - Links <stack-root>/<version>/<component>/conf -> /etc/<component>/<version>/0
    select(stack_name, package, version, ignore_errors=True)

    # check every existing link to see if it's a link and if it's pointed to the right spot
    for directory_struct in dirs:
        try:
            # check if conf is a link already
            old_conf = directory_struct['conf_dir']
            current_dir = directory_struct['current_dir']

            if os.path.islink(old_conf):
                # it's already a link; make sure it's a link to where we want it
                if os.readlink(old_conf) != current_dir:
                    # the link isn't to the right spot; re-link it
                    Logger.info("Re-linking symlink {0} to {1}".format(old_conf, current_dir))
                    Link(old_conf, action="delete")
                    Link(old_conf, to=current_dir)
                else:
                    Logger.info("{0} is already linked to {1}".format(old_conf, current_dir))
            elif os.path.isdir(old_conf):
                # the /etc/<component>/conf directory is not a link, so turn it into one
                Logger.info("{0} is a directory - it must be converted into a symlink".format(old_conf))

                backup_dir = _get_backup_conf_directory(old_conf)
                Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
                Execute(("cp", "-R", "-p", old_conf, backup_dir),
                        not_if=format("test -e {backup_dir}"), sudo=True)

                # delete the old /etc/<component>/conf directory now that it's been backed up
                Directory(old_conf, action="delete")

                # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
                Link(old_conf, to=current_dir)
            else:
                # missing entirely
                # /etc/<component>/conf -> <stack-root>/current/<component>/conf
                if package in ["atlas"]:
                    # HACK for Atlas
                    '''
                    In the case of Atlas, the Hive RPM installs /usr/$stack/$version/atlas with some partial
                    packages that contain Hive hooks, while the Atlas RPM is responsible for installing the
                    full content.

                    If the user does not have Atlas currently installed on their stack, then
                    /usr/$stack/current/atlas-client will be a broken symlink, and we should not create the
                    symlink /etc/atlas/conf -> /usr/$stack/current/atlas-client/conf.

                    If we mistakenly create this symlink, then when the user performs an EU/RU and then adds
                    the Atlas service, the Atlas RPM will not be able to copy its artifacts into the
                    /etc/atlas/conf directory, which prevents Ambari from copying those unmanaged contents
                    into /etc/atlas/$version/0
                    '''
                    component_list = default("/localComponents", [])
                    if "ATLAS_SERVER" in component_list or "ATLAS_CLIENT" in component_list:
                        Logger.info("Atlas is installed on this host.")
                        parent_dir = os.path.dirname(current_dir)
                        if os.path.exists(parent_dir):
                            Link(old_conf, to=current_dir)
                        else:
                            Logger.info("Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(
                                old_conf, current_dir))
                    else:
                        Logger.info("Will not create symlink from {0} to {1} because Atlas is not installed on this host.".format(
                            old_conf, current_dir))
                else:
                    # Normal path for other packages
                    Link(old_conf, to=current_dir)
        except Exception as e:
            Logger.warning("Could not change symlink for package {0} to point to current directory. Error: {1}".format(package, e))
def actionexecute(self, env):
    num_errors = 0

    # Parse parameters
    config = Script.get_config()

    try:
        command_repository = CommandRepository(config['repositoryFile'])
    except KeyError:
        raise Fail("The command repository indicated by 'repositoryFile' was not found")

    # Handle a SIGTERM and SIGINT gracefully
    signal.signal(signal.SIGTERM, self.abort_handler)
    signal.signal(signal.SIGINT, self.abort_handler)

    self.repository_version = command_repository.version_string

    # Select dict that contains parameters
    try:
        package_list = json.loads(config['roleParams']['package_list'])
        stack_id = config['roleParams']['stack_id']
    except KeyError:
        pass

    self.stack_name = Script.get_stack_name()
    if self.stack_name is None:
        raise Fail("Cannot determine the stack name")

    self.stack_root_folder = Script.get_stack_root()
    if self.stack_root_folder is None:
        raise Fail("Cannot determine the stack's root directory")

    if self.repository_version is None:
        raise Fail("Cannot determine the repository version to install")

    self.repository_version = self.repository_version.strip()

    try:
        if not command_repository.items:
            Logger.warning("Repository list is empty. Ambari may not be managing the repositories for {0}.".format(
                self.repository_version))
        else:
            Logger.info("Will install packages for repository version {0}".format(self.repository_version))
            new_repo_files = Script.repository_util.create_repo_files()
            self.repo_files.update(new_repo_files)
    except Exception as err:
        Logger.logger.exception("Cannot install repository files. Error: {0}".format(str(err)))
        num_errors += 1

    # Build structured output with initial values
    self.structured_output = {
        'package_installation_result': 'FAIL',
        'repository_version_id': command_repository.version_id
    }
    self.put_structured_out(self.structured_output)

    try:
        # check package manager non-completed transactions
        if self.repo_mgr.check_uncompleted_transactions():
            self.repo_mgr.print_uncompleted_transaction_hint()
            num_errors += 1
    except Exception as e:
        # we need to ignore any exception
        Logger.warning("Failed to check for uncompleted package manager transactions: " + str(e))

    if num_errors > 0:
        raise Fail("Failed to distribute repositories/install packages")

    # Initial list of versions, used to compute the new version installed
    self.old_versions = get_stack_versions(self.stack_root_folder)

    try:
        is_package_install_successful = False
        ret_code = self.install_packages(package_list)
        if ret_code == 0:
            self.structured_output['package_installation_result'] = 'SUCCESS'
            self.put_structured_out(self.structured_output)
            is_package_install_successful = True
        else:
            num_errors += 1
    except Exception as err:
        num_errors += 1
        Logger.logger.exception("Could not install packages. Error: {0}".format(str(err)))

    # Provide correct exit code
    if num_errors > 0:
        raise Fail("Failed to distribute repositories/install packages")

    self._fix_default_links_for_current()

    # if installing a version of HDP that needs some symlink love, then create them
    if is_package_install_successful and 'actual_version' in self.structured_output:
        self._relink_configurations_with_conf_select(stack_id, self.structured_output['actual_version'])
def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to="current"): """ Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package. If the package does not exist, then no work is performed. - Creates a /etc/<component>/conf.backup directory - Copies all configs from /etc/<component>/conf to conf.backup - Removes /etc/<component>/conf - Creates /etc/<component>/<version>/0 via <conf-selector-tool> - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool> - Links /etc/<component>/conf to <something> depending on function paramter -- /etc/<component>/conf -> <stack-root>/current/[component]-client/conf (usually) -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3) :param package: the package to create symlinks for (zookeeper, falcon, etc) :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234) :param dirs: the directories associated with the package (from get_package_dirs()) :param skip_existing_links: True to not do any work if already a symlink :param link_to: link to "current" or "backup" """ stack_name = Script.get_stack_name() bad_dirs = [] for dir_def in dirs: if not os.path.exists(dir_def['conf_dir']): bad_dirs.append(dir_def['conf_dir']) if len(bad_dirs) > 0: Logger.info("Skipping {0} as it does not exist.".format( ",".join(bad_dirs))) return # existing links should be skipped since we assume there's no work to do if skip_existing_links: bad_dirs = [] for dir_def in dirs: # check if conf is a link already old_conf = dir_def['conf_dir'] if os.path.islink(old_conf): Logger.info("{0} is already linked to {1}".format( old_conf, os.path.realpath(old_conf))) bad_dirs.append(old_conf) if len(bad_dirs) > 0: return # make backup dir and copy everything in case configure() was called after install() backup_dir = None for dir_def in dirs: old_conf = dir_def['conf_dir'] old_parent = os.path.abspath(os.path.join(old_conf, os.pardir)) backup_dir = os.path.join(old_parent, "conf.backup") Logger.info( "Backing up {0} to {1} if destination doesn't exist already.". format(old_conf, backup_dir)) Execute(("cp", "-R", "-p", old_conf, backup_dir), not_if=format("test -e {backup_dir}"), sudo=True) # we're already in the HDP stack # Create the versioned /etc/[component]/[version]/0 folder. # The component must be installed on the host. 
versioned_confs = create(stack_name, package, version, dry_run=True) Logger.info("Package {0} will have new conf directories: {1}".format( package, ", ".join(versioned_confs))) need_dirs = [] for d in versioned_confs: if not os.path.exists(d): need_dirs.append(d) if len(need_dirs) > 0: create(stack_name, package, version) # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory if len(dirs) > 1: for need_dir in need_dirs: for dir_def in dirs: if 'prefix' in dir_def and need_dir.startswith( dir_def['prefix']): old_conf = dir_def['conf_dir'] versioned_conf = need_dir Execute(as_sudo([ "cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf ], auto_escape=False), only_if=format("ls -d {old_conf}/*")) elif 1 == len(dirs) and 1 == len(need_dirs): old_conf = dirs[0]['conf_dir'] versioned_conf = need_dirs[0] Execute(as_sudo([ "cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf ], auto_escape=False), only_if=format("ls -d {old_conf}/*")) # <stack-root>/current/[component] is already set to to the correct version, e.g., <stack-root>/[version]/[component] select(stack_name, package, version, ignore_errors=True) # Symlink /etc/[component]/conf to /etc/[component]/conf.backup try: # No more references to /etc/[component]/conf for dir_def in dirs: # E.g., /etc/[component]/conf new_symlink = dir_def['conf_dir'] # Remove new_symlink to pave the way, but only if it's a directory if not os.path.islink(new_symlink): Directory(new_symlink, action="delete") if link_to in ["current", "backup"]: # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf if link_to == "backup": Link(new_symlink, to=backup_dir) else: Link(new_symlink, to=dir_def['current_dir']) else: Logger.error( "Unsupported 'link_to' argument. Could not link package {0}" .format(package)) except Exception, e: Logger.warning( "Could not change symlink for package {0} to point to {1} directory. Error: {2}" .format(package, link_to, e))
def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to=DIRECTORY_TYPE_CURRENT):
    """
    Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
    If the package does not exist, then no work is performed.

    - Creates a /etc/<component>/conf.backup directory
    - Copies all configs from /etc/<component>/conf to conf.backup
    - Removes /etc/<component>/conf
    - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
    - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
    - Links /etc/<component>/conf to <something> depending on the function parameter
    -- /etc/<component>/conf -> <stack-root>/current/[component]-client/conf (usually)
    -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3)

    :param package: the package to create symlinks for (zookeeper, falcon, etc)
    :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
    :param dirs: the directories associated with the package (from get_package_dirs())
    :param skip_existing_links: True to not do any work if already a symlink
    :param link_to: link to "current" or "backup"
    """
    # lack of enums makes this possible - we need to know what to link to
    if link_to not in [DIRECTORY_TYPE_CURRENT, DIRECTORY_TYPE_BACKUP]:
        raise Fail("Unsupported 'link_to' argument. Could not link package {0}".format(package))

    stack_name = Script.get_stack_name()

    bad_dirs = []
    for dir_def in dirs:
        if not os.path.exists(dir_def['conf_dir']):
            bad_dirs.append(dir_def['conf_dir'])

    if len(bad_dirs) > 0:
        Logger.info("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
        return

    # existing links should be skipped since we assume there's no work to do
    # they should be checked against the correct target though
    if skip_existing_links:
        bad_dirs = []
        for dir_def in dirs:
            # check if conf is a link already
            old_conf = dir_def['conf_dir']
            if os.path.islink(old_conf):
                # it's already a link; make sure it's a link to where we want it
                if link_to == DIRECTORY_TYPE_BACKUP:
                    target_conf_dir = _get_backup_conf_directory(old_conf)
                else:
                    target_conf_dir = dir_def['current_dir']

                # the link isn't to the right spot; re-link it
                if os.readlink(old_conf) != target_conf_dir:
                    Logger.info("Re-linking symlink {0} to {1}".format(old_conf, target_conf_dir))
                    Link(old_conf, action="delete")
                    Link(old_conf, to=target_conf_dir)
                else:
                    Logger.info("{0} is already linked to {1}".format(old_conf, os.path.realpath(old_conf)))
                    bad_dirs.append(old_conf)

    if len(bad_dirs) > 0:
        return

    # make backup dir and copy everything in case configure() was called after install()
    for dir_def in dirs:
        old_conf = dir_def['conf_dir']
        backup_dir = _get_backup_conf_directory(old_conf)
        Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
        Execute(("cp", "-R", "-p", old_conf, backup_dir),
                not_if=format("test -e {backup_dir}"), sudo=True)

    # we're already in the HDP stack
    # Create the versioned /etc/[component]/[version]/0 folder.
    # The component must be installed on the host.
    versioned_confs = create(stack_name, package, version, dry_run=True)

    Logger.info("Package {0} will have new conf directories: {1}".format(package, ", ".join(versioned_confs)))

    need_dirs = []
    for d in versioned_confs:
        if not os.path.exists(d):
            need_dirs.append(d)

    if len(need_dirs) > 0:
        create(stack_name, package, version)

        # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
        if len(dirs) > 1:
            for need_dir in need_dirs:
                for dir_def in dirs:
                    if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
                        old_conf = dir_def['conf_dir']
                        versioned_conf = need_dir
                        Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
                                only_if=format("ls -d {old_conf}/*"))
        elif 1 == len(dirs) and 1 == len(need_dirs):
            old_conf = dirs[0]['conf_dir']
            versioned_conf = need_dirs[0]
            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
                    only_if=format("ls -d {old_conf}/*"))

    # <stack-root>/current/[component] is already set to the correct version, e.g., <stack-root>/[version]/[component]
    select(stack_name, package, version, ignore_errors=True)

    # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
    try:
        # No more references to /etc/[component]/conf
        for dir_def in dirs:
            # E.g., /etc/[component]/conf
            new_symlink = dir_def['conf_dir']

            # Delete the existing directory/link so that linking will work
            if not os.path.islink(new_symlink):
                Directory(new_symlink, action="delete")
            else:
                Link(new_symlink, action="delete")

            old_conf = dir_def['conf_dir']
            backup_dir = _get_backup_conf_directory(old_conf)

            # link /etc/[component]/conf -> /etc/[component]/conf.backup
            # or
            # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
            if link_to == DIRECTORY_TYPE_BACKUP:
                Link(new_symlink, to=backup_dir)
            else:
                Link(new_symlink, to=dir_def['current_dir'])

                # HACK
                if package in ["atlas"]:
                    Logger.info("Seeding the new conf symlink {0} from the old backup directory {1} in case any "
                                "unmanaged artifacts are needed.".format(new_symlink, backup_dir))

                    # If /etc/[component]/conf.backup exists, then copy any artifacts not managed by Ambari
                    # to the new symlink target.
                    # Be careful not to clobber any existing files.
                    Execute(as_sudo(["cp", "-R", "--no-clobber", os.path.join(backup_dir, "*"), new_symlink], auto_escape=False),
                            only_if=format("test -e {new_symlink}"))
    except Exception as e:
        Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(
            package, link_to, e))
def actionexecute(self, env):
    num_errors = 0

    # Parse parameters
    config = Script.get_config()

    repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
    repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
    template = repo_rhel_suse if OSCheck.is_redhat_family() or OSCheck.is_suse_family() else repo_ubuntu

    # Handle a SIGTERM and SIGINT gracefully
    signal.signal(signal.SIGTERM, self.abort_handler)
    signal.signal(signal.SIGINT, self.abort_handler)

    self.repository_version_id = None

    # Select dict that contains parameters
    try:
        self.repository_version = config['roleParams']['repository_version']
        base_urls = json.loads(config['roleParams']['base_urls'])
        package_list = json.loads(config['roleParams']['package_list'])
        stack_id = config['roleParams']['stack_id']
        if 'repository_version_id' in config['roleParams']:
            self.repository_version_id = config['roleParams']['repository_version_id']
    except KeyError:
        # Last try
        self.repository_version = config['commandParams']['repository_version']
        base_urls = json.loads(config['commandParams']['base_urls'])
        package_list = json.loads(config['commandParams']['package_list'])
        stack_id = config['commandParams']['stack_id']
        if 'repository_version_id' in config['commandParams']:
            self.repository_version_id = config['commandParams']['repository_version_id']

    # current stack information
    self.current_stack_version_formatted = None
    if 'stack_version' in config['hostLevelParams']:
        current_stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
        self.current_stack_version_formatted = format_stack_version(current_stack_version_unformatted)

    self.stack_name = Script.get_stack_name()
    if self.stack_name is None:
        raise Fail("Cannot determine the stack name")

    self.stack_root_folder = Script.get_stack_root()
    if self.stack_root_folder is None:
        raise Fail("Cannot determine the stack's root directory")

    if self.repository_version is None:
        raise Fail("Cannot determine the repository version to install")

    self.repository_version = self.repository_version.strip()

    # Install/update repositories
    installed_repositories = []
    self.current_repositories = []
    self.current_repo_files = set()

    # Enable base system repositories
    # We don't need that for RHEL family, because we leave all repos enabled
    # except disabled HDP* ones
    if OSCheck.is_suse_family():
        self.current_repositories.append('base')
    elif OSCheck.is_ubuntu_family():
        self.current_repo_files.add('base')

    Logger.info("Will install packages for repository version {0}".format(self.repository_version))

    if 0 == len(base_urls):
        Logger.warning("Repository list is empty. Ambari may not be managing the repositories for {0}.".format(
            self.repository_version))

    try:
        append_to_file = False
        for url_info in base_urls:
            repo_name, repo_file = self.install_repository(url_info, append_to_file, template)
            self.current_repositories.append(repo_name)
            self.current_repo_files.add(repo_file)
            append_to_file = True

        installed_repositories = list_ambari_managed_repos(self.stack_name)
    except Exception as err:
        Logger.logger.exception("Cannot distribute repositories. Error: {0}".format(str(err)))
        num_errors += 1
def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to=DIRECTORY_TYPE_CURRENT):
    """
    Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
    If the package does not exist, then no work is performed.

    - Creates a /etc/<component>/conf.backup directory
    - Copies all configs from /etc/<component>/conf to conf.backup
    - Removes /etc/<component>/conf
    - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
    - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
    - Links /etc/<component>/conf to <something> depending on the function parameter
    -- /etc/<component>/conf -> <stack-root>/current/[component]-client/conf (usually)
    -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3)

    :param package: the package to create symlinks for (zookeeper, falcon, etc)
    :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
    :param dirs: the directories associated with the package (from get_package_dirs())
    :param skip_existing_links: True to not do any work if already a symlink
    :param link_to: link to "current" or "backup"
    """
    # lack of enums makes this possible - we need to know what to link to
    if link_to not in [DIRECTORY_TYPE_CURRENT, DIRECTORY_TYPE_BACKUP]:
        raise Fail("Unsupported 'link_to' argument. Could not link package {0}".format(package))

    stack_name = Script.get_stack_name()

    bad_dirs = []
    for dir_def in dirs:
        if not os.path.exists(dir_def['conf_dir']):
            bad_dirs.append(dir_def['conf_dir'])

    if len(bad_dirs) > 0:
        Logger.info("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
        return

    # existing links should be skipped since we assume there's no work to do
    # they should be checked against the correct target though
    if skip_existing_links:
        bad_dirs = []
        for dir_def in dirs:
            # check if conf is a link already
            old_conf = dir_def['conf_dir']
            if os.path.islink(old_conf):
                # it's already a link; make sure it's a link to where we want it
                if link_to == DIRECTORY_TYPE_BACKUP:
                    target_conf_dir = _get_backup_conf_directory(old_conf)
                else:
                    target_conf_dir = dir_def['current_dir']

                # the link isn't to the right spot; re-link it
                if os.readlink(old_conf) != target_conf_dir:
                    Logger.info("Re-linking symlink {0} to {1}".format(old_conf, target_conf_dir))
                    Link(old_conf, action="delete")
                    Link(old_conf, to=target_conf_dir)
                else:
                    Logger.info("{0} is already linked to {1}".format(old_conf, os.path.realpath(old_conf)))
                    bad_dirs.append(old_conf)

    if len(bad_dirs) > 0:
        return

    # make backup dir and copy everything in case configure() was called after install()
    for dir_def in dirs:
        old_conf = dir_def['conf_dir']
        backup_dir = _get_backup_conf_directory(old_conf)
        Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
        Execute(("cp", "-R", "-p", old_conf, backup_dir),
                not_if=format("test -e {backup_dir}"), sudo=True)

    # we're already in the HDP stack
    # Create the versioned /etc/[component]/[version]/0 folder.
    # The component must be installed on the host.
    versioned_confs = create(stack_name, package, version, dry_run=True)

    Logger.info("Package {0} will have new conf directories: {1}".format(package, ", ".join(versioned_confs)))

    need_dirs = []
    for d in versioned_confs:
        if not os.path.exists(d):
            need_dirs.append(d)

    if len(need_dirs) > 0:
        create(stack_name, package, version)

        # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
        if len(dirs) > 1:
            for need_dir in need_dirs:
                for dir_def in dirs:
                    if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
                        old_conf = dir_def['conf_dir']
                        versioned_conf = need_dir
                        Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
                                only_if=format("ls -d {old_conf}/*"))
        elif 1 == len(dirs) and 1 == len(need_dirs):
            old_conf = dirs[0]['conf_dir']
            versioned_conf = need_dirs[0]
            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
                    only_if=format("ls -d {old_conf}/*"))

    # <stack-root>/current/[component] is already set to the correct version, e.g., <stack-root>/[version]/[component]
    select(stack_name, package, version, ignore_errors=True)

    # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
    try:
        # No more references to /etc/[component]/conf
        for dir_def in dirs:
            # E.g., /etc/[component]/conf
            new_symlink = dir_def['conf_dir']

            # Delete the existing directory/link so that linking will work
            if not os.path.islink(new_symlink):
                Directory(new_symlink, action="delete")
            else:
                Link(new_symlink, action="delete")

            # link /etc/[component]/conf -> /etc/[component]/conf.backup
            # or
            # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
            if link_to == DIRECTORY_TYPE_BACKUP:
                old_conf = dir_def['conf_dir']
                backup_dir = _get_backup_conf_directory(old_conf)
                Link(new_symlink, to=backup_dir)
            else:
                Link(new_symlink, to=dir_def['current_dir'])
    except Exception as e:
        Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(
            package, link_to, e))
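# A minimal sketch (not part of the original module) of how the conversion above is typically
# driven. It assumes get_package_dirs() (referenced in the docstring) returns a mapping of
# package name to its directory definitions; the package name "zookeeper" and version string
# are illustrative only.
package_dirs = get_package_dirs()
if "zookeeper" in package_dirs:
    convert_conf_directories_to_symlinks(
        "zookeeper", "2.3.0.0-1234", package_dirs["zookeeper"],
        skip_existing_links=True, link_to=DIRECTORY_TYPE_CURRENT)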