Example no. 1
0
  def actionexecute(self, env):
    """
    Point every stack component on this host at the requested stack version.

    Clears the yum cache on RedHat-family hosts, then (when the stack
    supports rolling upgrade) invokes the stack selector with 'set all',
    and finally creates /etc/[component]/conf symlinks when the stack
    versions its configurations.
    """
    config = Script.get_config()

    requested_version = default('/commandParams/version', None)
    stack_name = default('/hostLevelParams/stack_name', "")

    # the version to activate is mandatory for this action
    if not requested_version:
      raise Fail("Value is required for '/commandParams/version'")

    # other os?
    if OSCheck.is_redhat_family():
      code, out = shell.call(('/usr/bin/yum', 'clean', 'all'), sudo=True)

    formatted_version = format_stack_version(requested_version)

    if formatted_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_version):
      selector_path = stack_tools.get_stack_tool_path(stack_tools.STACK_SELECTOR_NAME)
      select_cmd = ('ambari-python-wrap', selector_path, 'set', 'all', requested_version)
      code, out = shell.call(select_cmd, sudo=True)
      if code != 0:
        raise Exception("Command '{0}' exit code is nonzero".format(select_cmd))

    if formatted_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, formatted_version):
      # backup the old and symlink /etc/[component]/conf to <stack-root>/current/[component]
      for package_name, dir_defs in conf_select.get_package_dirs().iteritems():
        for dir_def in dir_defs:
          link_config(dir_def['conf_dir'], dir_def['current_dir'])
Example no. 2
0
    def actionexecute(self, env):
        """
        Point all stack components on this host at the requested stack version.

        Cleans the yum cache on RedHat-family hosts, then (for HDP >= 2.2)
        runs the stack selector 'set all', and (for HDP >= 2.3) backs up and
        symlinks the /etc/[component]/conf directories.

        Raises Fail when no version was supplied in the command parameters.
        """
        config = Script.get_config()

        version = default('/commandParams/version', None)
        stack_name = default('/hostLevelParams/stack_name', "")

        if not version:
            raise Fail("Value is required for '/commandParams/version'")

        # other os?
        if OSCheck.is_redhat_family():
            cmd = ('/usr/bin/yum', 'clean', 'all')
            code, out = shell.call(cmd, sudo=True)

        min_ver = format_stack_version("2.2")
        real_ver = format_stack_version(version)
        if stack_name == "HDP":
            if compare_versions(real_ver, min_ver) >= 0:
                stack_selector_path = stack_tools.get_stack_tool_path(
                    stack_tools.STACK_SELECTOR_NAME)
                cmd = ('ambari-python-wrap', stack_selector_path, 'set', 'all',
                       version)
                # NOTE(review): the selector's exit code is not checked here,
                # so a failing "set all" is silently ignored — confirm intended.
                code, out = shell.call(cmd, sudo=True)

            if compare_versions(real_ver, format_stack_version("2.3")) >= 0:
                # backup the old and symlink /etc/[component]/conf to <stack-root>/current/[component]
                for k, v in conf_select.get_package_dirs().iteritems():
                    for dir_def in v:
                        link_config(dir_def['conf_dir'],
                                    dir_def['current_dir'])
Example no. 3
0
  def _create_config_links_if_necessary(self, stack_id, stack_version):
    """
    Sets up the required structure for /etc/<component>/conf symlinks and <stack-root>/current
    configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3

    stack_id:  stack id, ie HDP-2.3
    stack_version:  version to set, ie 2.3.0.0-1234
    """
    if stack_id is None:
      Logger.info("Cannot create config links when stack_id is not defined")
      return

    # stack_id is expected to look like "<NAME>-<VERSION>", e.g. "HDP-2.3"
    args = stack_id.upper().split('-')
    if len(args) != 2:
      Logger.info("Unrecognized stack id {0}, cannot create config links".format(stack_id))
      return

    # only stacks that version their configurations need the symlinks
    target_stack_version = args[1]
    if not (target_stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, target_stack_version)):
      Logger.info("Configuration symlinks are not needed for {0}".format(stack_version))
      return

    # After upgrading hdf-select package from HDF-2.X to HDF-3.Y, we need to create this symlink
    if self.stack_name.upper() == "HDF" \
            and not os.path.exists("/usr/bin/conf-select") and os.path.exists("/usr/bin/hdfconf-select"):
      Link("/usr/bin/conf-select", to = "/usr/bin/hdfconf-select")

    # NOTE(review): the same informational message is logged once per package
    # and conf_selector_name is recomputed each iteration — harmless but noisy.
    for package_name, directories in conf_select.get_package_dirs().iteritems():
      conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
      Logger.info("The current cluster stack of {0} does not require backing up configurations; "
                  "only {1} versioned config directories will be created.".format(stack_version, conf_selector_name))
      # only link configs for all known packages
      conf_select.select(self.stack_name, package_name, stack_version, ignore_errors = True)
Example no. 4
0
  def _create_config_links_if_necessary(self, stack_id, stack_version):
    """
    Sets up the required structure for /etc/<component>/conf symlinks and <stack-root>/current
    configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3

    stack_id:  stack id, ie HDP-2.3
    stack_version:  version to set, ie 2.3.0.0-1234
    """
    if stack_id is None:
      Logger.info("Cannot create config links when stack_id is not defined")
      return

    # stack_id is expected to look like "<NAME>-<VERSION>", e.g. "HDP-2.3"
    args = stack_id.upper().split('-')
    if len(args) != 2:
      Logger.info("Unrecognized stack id {0}, cannot create config links".format(stack_id))
      return

    # only stacks that version their configurations need the symlinks
    target_stack_version = args[1]
    if not (target_stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, target_stack_version)):
      Logger.info("Configuration symlinks are not needed for {0}".format(stack_version))
      return

    for package_name, directories in conf_select.get_package_dirs().iteritems():
      # if already on HDP 2.3, then we should skip making conf.backup folders
      if self.current_stack_version_formatted and check_stack_feature(StackFeature.CONFIG_VERSIONING, self.current_stack_version_formatted):
        conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
        Logger.info("The current cluster stack of {0} does not require backing up configurations; "
                    "only {1} versioned config directories will be created.".format(stack_version, conf_selector_name))
        # only link configs for all known packages
        conf_select.select(self.stack_name, package_name, stack_version, ignore_errors = True)
      else:
        # link configs and create conf.backup folders for all known packages
        # this will also call conf-select select
        conf_select.convert_conf_directories_to_symlinks(package_name, stack_version, directories,
          skip_existing_links = False, link_to = "backup")
Example no. 5
0
  def actionexecute(self, env):
    """
    Activate the requested stack version on every component on this host.

    Cleans the yum cache on RedHat-family hosts, skips hosts that carry no
    stack components, invokes the stack selector with 'set all', and creates
    the /etc/[component]/conf symlinks when the stack versions configurations.

    Raises Fail when the version is missing or cannot be normalized.
    """
    requested_version = default('/commandParams/version', None)

    if not requested_version:
      raise Fail("Value is required for '/commandParams/version'")

    # other os?
    if OSCheck.is_redhat_family():
      code, out = shell.call(('/usr/bin/yum', 'clean', 'all'), sudo=True)

    formatted_version = format_stack_version(requested_version)
    if not formatted_version:
      raise Fail("Unable to determine a properly formatted stack version from {0}".format(requested_version))

    stack_selector_path = stack_tools.get_stack_tool_path(stack_tools.STACK_SELECTOR_NAME)

    # this script runs on all hosts; if this host doesn't have stack components,
    # then don't invoke the stack tool
    # (no need to log that it's skipped - the function will do that)
    if is_host_skippable(stack_selector_path, formatted_version):
      return

    # invoke "set all"
    select_cmd = ('ambari-python-wrap', stack_selector_path, 'set', 'all', requested_version)
    code, out = shell.call(select_cmd, sudo=True)
    if code != 0:
      raise Exception("Command '{0}' exit code is nonzero".format(select_cmd))

    if check_stack_feature(StackFeature.CONFIG_VERSIONING, formatted_version):
      # backup the old and symlink /etc/[component]/conf to <stack-root>/current/[component]
      for package_name, dir_defs in conf_select.get_package_dirs().iteritems():
        for dir_def in dir_defs:
          link_config(dir_def['conf_dir'], dir_def['current_dir'])
Example no. 6
0
 def create_config_version(self, env):
     """
     Create versioned configuration symlinks for the 'registry' package only.
     """
     import params
     package_dirs = conf_select.get_package_dirs()
     for package_name, directories in package_dirs.iteritems():
         # all other conf-select packages are intentionally ignored here
         if package_name != 'registry':
             continue
         conf_select.convert_conf_directories_to_symlinks(
             package_name, params.current_version, directories)
Example no. 7
0
def link_configs(struct_out_file):
    """
  Use the conf_select module to link configuration directories correctly.
  """
    import params

    json_version = load_version(struct_out_file)

    if not json_version:
        Logger.info(
            "Could not load 'version' from {0}".format(struct_out_file))
        return

    # Guard clause: when sysprep asks us to skip AND the marker file proves a
    # previous run already did the work, there is nothing left to do.
    if params.sysprep_skip_conf_select and os.path.exists(
            params.conf_select_marker_file):
        Logger.info(
            format(
                "Skipping conf-select stage, since cluster-env/sysprep_skip_conf_select is set and mark file {conf_select_marker_file} exists"
            ))
        return

    # On parallel command execution this should be executed by a single process at a time.
    with FcntlBasedProcessLock(
            params.link_configs_lock_file,
            enabled=params.is_parallel_execution_enabled,
            skip_fcntl_failures=True):
        for package_name, directories in conf_select.get_package_dirs(
        ).iteritems():
            conf_select.convert_conf_directories_to_symlinks(
                package_name, json_version, directories)

    # create a file to mark that conf-selects were already done
    with open(params.conf_select_marker_file, "wb") as marker:
        pass
Example no. 8
0
    def unlink_all_configs(self, env):
        """
    Reverses the work performed in link_config. This should only be used when downgrading from
    HDP 2.3 to 2.2 in order to undo the symlink work required for 2.3.
    """
        stack_name = default('/hostLevelParams/stack_name', "").upper()
        downgrade_to_version = default('/commandParams/version', None)
        downgrade_from_version = default(
            '/commandParams/downgrade_from_version', None)
        # default to UPGRADE so that a missing direction never unlinks configs
        upgrade_direction = default("/commandParams/upgrade_direction",
                                    Direction.UPGRADE)

        # downgrade only
        if upgrade_direction != Direction.DOWNGRADE:
            Logger.warning(
                "Unlinking configurations should only be performed on a downgrade."
            )
            return

        # HDP only
        if stack_name != "HDP":
            Logger.warning(
                "Unlinking configurations should only be performed on the HDP stack."
            )
            return

        if downgrade_to_version is None or downgrade_from_version is None:
            Logger.warning(
                "Both 'commandParams/version' and 'commandParams/downgrade_from_version' must be specified to unlink configs on downgrade."
            )
            return

        Logger.info(
            "Unlinking all configs when downgrading from HDP 2.3 to 2.2")

        # normalize the versions
        stack_23 = format_stack_version("2.3")
        downgrade_to_version = format_stack_version(downgrade_to_version)
        downgrade_from_version = format_stack_version(downgrade_from_version)

        # downgrade-to-version must be 2.2 (less than 2.3)
        if compare_versions(downgrade_to_version, stack_23) >= 0:
            Logger.warning(
                "Unlinking configurations should only be performed when downgrading to HDP 2.2"
            )
            return

        # downgrade-from-version must be 2.3+
        if compare_versions(downgrade_from_version, stack_23) < 0:
            Logger.warning(
                "Unlinking configurations should only be performed when downgrading from HDP 2.3 or later"
            )
            return

        # iterate through all directory conf mappings and undo the symlinks
        for key, value in conf_select.get_package_dirs().iteritems():
            for directory_mapping in value:
                original_config_directory = directory_mapping['conf_dir']
                self._unlink_config(original_config_directory)
Example no. 9
0
    def _fix_default_links_for_current(self):
        """
    If a prior version of Ambari did not correctly reverse the conf symlinks, then they would
    be put into a bad state when distributing a new stack. For example:

    /etc/component/conf (directory)
    <stack-root>/v1/component/conf -> /etc/component/conf

    When distributing v2, we'd detect the /etc/component/conf problems and would try to adjust it:
    /etc/component/conf -> <stack-root>/current/component/conf
    <stack-root>/v2/component/conf -> /etc/component/v2/0

    The problem is that v1 never gets changed (since the stack being distributed is v2), and
    we end up with a circular link:
    /etc/component/conf -> <stack-root>/current/component/conf
    <stack-root>/v1/component/conf -> /etc/component/conf

    :return: None
    """
        Logger.info(
            "Attempting to fix any configuration symlinks which are not in the correct state"
        )
        from resource_management.libraries.functions import stack_select
        restricted_packages = conf_select.get_restricted_packages()

        if 0 == len(restricted_packages):
            Logger.info(
                "There are no restricted conf-select packages for this installation"
            )
        else:
            Logger.info("Restricting conf-select packages to {0}".format(
                restricted_packages))

        for package_name, directories in conf_select.get_package_dirs(
        ).iteritems():
            Logger.info(
                "Attempting to fix the default conf links for {0}".format(
                    package_name))
            Logger.info(
                "The following directories will be fixed for {0}: {1}".format(
                    package_name, str(directories)))

            # BUG FIX: reset per package. Previously stack_version was only
            # assigned when a component was found, so a package without a
            # "component" entry either raised UnboundLocalError (on the first
            # iteration) or silently reused the previous package's version.
            component_name = None
            stack_version = None
            for directory_struct in directories:
                if "component" in directory_struct:
                    component_name = directory_struct["component"]
            if component_name:
                stack_version = stack_select.get_stack_version_before_install(
                    component_name)

            if 0 == len(restricted_packages
                        ) or package_name in restricted_packages:
                if stack_version:
                    conf_select.convert_conf_directories_to_symlinks(
                        package_name, stack_version, directories)
                else:
                    Logger.warning(
                        "Unable to fix {0} since there is no known installed version for this component"
                        .format(package_name))
Example no. 10
0
    def unlink_all_configs(self, env):
        """
    Reverses the work performed in link_config. This should only be used when downgrading from
    HDP 2.3 to 2.2 in order to undo the symlink work required for 2.3.
    """
        stack_name = default('/hostLevelParams/stack_name', "").upper()
        downgrade_to_version = default('/commandParams/version', None)
        downgrade_from_version = default(
            '/commandParams/downgrade_from_version', None)
        # default to UPGRADE so that a missing direction never unlinks configs
        upgrade_direction = default("/commandParams/upgrade_direction",
                                    Direction.UPGRADE)

        # downgrade only
        if upgrade_direction != Direction.DOWNGRADE:
            Logger.warning(
                "Unlinking configurations should only be performed on a downgrade."
            )
            return

        if downgrade_to_version is None or downgrade_from_version is None:
            Logger.warning(
                "Both 'commandParams/version' and 'commandParams/downgrade_from_version' must be specified to unlink configs on downgrade."
            )
            return

        Logger.info(
            "Unlinking all configs when downgrading from {0} {1} to {2}".
            format(stack_name, downgrade_from_version, downgrade_to_version))

        # normalize the versions
        downgrade_to_version = format_stack_version(downgrade_to_version)
        downgrade_from_version = format_stack_version(downgrade_from_version)

        # the target version must NOT support config versioning (i.e. pre-2.3);
        # otherwise the symlinks are still required and must not be removed
        if downgrade_to_version and check_stack_feature(
                StackFeature.CONFIG_VERSIONING, downgrade_to_version):
            Logger.warning(
                "Unlinking configurations should not be performed when downgrading {0} {1} to {2}"
                .format(stack_name, downgrade_from_version,
                        downgrade_to_version))
            return

        # the source version must support config versioning (i.e. 2.3+),
        # otherwise there were never any symlinks to undo
        if not (downgrade_from_version and check_stack_feature(
                StackFeature.CONFIG_VERSIONING, downgrade_from_version)):
            Logger.warning(
                "Unlinking configurations should not be performed when downgrading {0} {1} to {2}"
                .format(stack_name, downgrade_from_version,
                        downgrade_to_version))
            return

        # iterate through all directory conf mappings and undo the symlinks
        for key, value in conf_select.get_package_dirs().iteritems():
            for directory_mapping in value:
                original_config_directory = directory_mapping['conf_dir']
                self._unlink_config(original_config_directory)
Example no. 11
0
  def test_hook_default_conf_select(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
    """
    Verifies the after-INSTALL hook against a default 2.3 configuration:
    it must run the stack selector for hive-server2, render core-site.xml,
    create the logfeeder conf directory, and back up / symlink every
    conf-select managed configuration directory.
    """

    # have conf_select.create return a predictable versioned conf directory
    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
      return "/etc/{0}/{1}/0".format(arg2, arg3)

    conf_select_create_mock.side_effect = mocked_conf_select

    config_file = self.get_src_folder() + "/test/python/stacks/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)

    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    json_content['clusterLevelParams']['stack_version'] = "2.3"

    self.executeScript("after-INSTALL/scripts/hook.py",
                       classname="AfterInstallHook",
                       command="hook",
                       target=RMFTestCase.TARGET_STACK_HOOKS,
                       config_dict = json_content,
                       config_overrides = self.CONFIG_OVERRIDES)


    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
      sudo = True)

    self.assertResourceCalled('XmlConfig', 'core-site.xml',
      owner = 'hdfs',
      group = 'hadoop',
      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
      configurations = self.getConfig()['configurations']['core-site'],
      configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
      xml_include_file=None)

    self.assertResourceCalled('Directory',
                              '/usr/lib/ambari-logsearch-logfeeder/conf',
                              mode = 0755,
                              cd_access = 'a',
                              create_parents = True)

    # each managed conf dir must be copied to a .backup, deleted, and relinked
    package_dirs = conf_select.get_package_dirs();
    for package, dir_defs in package_dirs.iteritems():
      for dir_def in dir_defs:
        conf_dir = dir_def['conf_dir']
        conf_backup_dir = conf_dir + ".backup"
        current_dir = dir_def['current_dir']
        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
            not_if = 'test -e ' + conf_backup_dir,
            sudo = True,)

        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
        self.assertResourceCalled('Link', conf_dir, to = current_dir,)

    self.assertNoMoreResources()
Example no. 12
0
    def test_symlink_noop(self):
        """
    Tests that conf-select symlinking does nothing if the directory doesn't exist
    :return:
    """
        hadoop_dirs = conf_select.get_package_dirs()["hadoop"]

        conf_select.convert_conf_directories_to_symlinks(
            "hadoop", "2.3.0.0-1234", hadoop_dirs)

        # no resources should have been scheduled
        self.assertEqual("[]", pprint.pformat(self.env.resource_list))
Example no. 13
0
    def test_symlink_conversion_relinks_wrong_link(self):
        """
    Tests that conf-select symlinking can detect a wrong directory
    :return:
    """
        hadoop_dirs = conf_select.get_package_dirs()["hadoop"]

        conf_select.convert_conf_directories_to_symlinks(
            "hadoop", "2.3.0.0-1234", hadoop_dirs)

        # the wrong link must be scheduled for relinking (delete + link)
        expected = "[Link['/etc/hadoop/conf'], Link['/etc/hadoop/conf']]"
        self.assertEqual(pprint.pformat(self.env.resource_list), expected)
Example no. 14
0
    def test_symlink_conversion_to_current(self, islink_mock, path_mock,
                                           isdir_mock, shell_call_mock):
        """
    Tests that conf-select creates the correct symlink directories.
    :return:
    """
        def fake_shell_call(command, **kwargs):
            """
      Instead of shell.call, report the real conf directory for any command.
      :param command: Command that would have been executed.
      :return: Returns a tuple of (process output code, stdout, stderr)
      """
            return (0, "/etc/hadoop/conf", None)

        def fake_path_exists(path):
            # only the real conf dir and the versioned dir exist
            return path in ("/etc/hadoop/conf", "/etc/hadoop/2.3.0.0-1234/0")

        def fake_islink(path):
            # nothing is a symlink in this scenario
            return False

        def fake_isdir(path):
            # NOTE(review): defined like the original's isdir mock, which is
            # never wired to isdir_mock.side_effect — kept for parity.
            return path == "/etc/hadoop/conf"

        hadoop_dirs = conf_select.get_package_dirs()["hadoop"]

        path_mock.side_effect = fake_path_exists
        islink_mock.side_effect = fake_islink
        shell_call_mock.side_effect = fake_shell_call
        conf_select.convert_conf_directories_to_symlinks(
            "hadoop", "2.3.0.0-1234", hadoop_dirs)

        # expect backup copy, directory delete, then the symlink
        self.assertEqual(
            pprint.pformat(self.env.resource_list[0]),
            "Execute[('cp', '-R', '-p', u'/etc/hadoop/conf', u'/etc/hadoop/conf.backup')]"
        )
        self.assertEqual(pprint.pformat(self.env.resource_list[1]),
                         "Directory['/etc/hadoop/conf']")
        self.assertEqual(pprint.pformat(self.env.resource_list[2]),
                         "Link['/etc/hadoop/conf']")
Example no. 15
0
    def _relink_configurations_with_conf_select(self, stack_id, stack_version):
        """
    Sets up the required structure for /etc/<component>/conf symlinks and <stack-root>/current
    configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3

    stack_id:  stack id, ie HDP-2.3
    stack_version:  version to set, ie 2.3.0.0-1234
    """
        if stack_id is None:
            Logger.info(
                "Cannot create config links when stack_id is not defined")
            return

        # stack_id is expected to look like "<NAME>-<VERSION>", e.g. "HDP-2.3"
        args = stack_id.upper().split('-')
        if len(args) != 2:
            Logger.info(
                "Unrecognized stack id {0}, cannot create config links".format(
                    stack_id))
            return

        # only stacks that version their configurations need the symlinks
        target_stack_version = args[1]
        if not (target_stack_version and check_stack_feature(
                StackFeature.CONFIG_VERSIONING, target_stack_version)):
            Logger.info("Configuration symlinks are not needed for {0}".format(
                stack_version))
            return

        # After upgrading hdf-select package from HDF-2.X to HDF-3.Y, we need to create this symlink
        if self.stack_name.upper() == "HDF" \
                and not sudo.path_exists("/usr/bin/conf-select") and sudo.path_exists("/usr/bin/hdfconf-select"):
            Link("/usr/bin/conf-select", to="/usr/bin/hdfconf-select")

        restricted_packages = conf_select.get_restricted_packages()

        if 0 == len(restricted_packages):
            Logger.info(
                "There are no restricted conf-select packages for this installation"
            )
        else:
            Logger.info("Restricting conf-select packages to {0}".format(
                restricted_packages))

        # relink every known package unless a restriction list limits the set
        for package_name, directories in conf_select.get_package_dirs(
        ).iteritems():
            if 0 == len(restricted_packages
                        ) or package_name in restricted_packages:
                conf_select.convert_conf_directories_to_symlinks(
                    package_name, stack_version, directories)
Example no. 16
0
def link_configs(struct_out_file):
  """
  Links configs, only on a fresh install of HDP-2.3 and higher
  """
  import params

  json_version = load_version(struct_out_file)

  if not json_version:
    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
    return

  # On parallel command execution this should be executed by a single process at a time.
  process_lock = FcntlBasedProcessLock(params.link_configs_lock_file,
                                       enabled = params.is_parallel_execution_enabled,
                                       skip_fcntl_failures = True)
  with process_lock:
    for package_name, directories in conf_select.get_package_dirs().iteritems():
      conf_select.convert_conf_directories_to_symlinks(package_name, json_version, directories)
Example no. 17
0
def link_configs(struct_out_file):
  """
  Use the conf_select module to link configuration directories correctly.
  """
  import params

  json_version = load_version(struct_out_file)

  if not json_version:
    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
    return

  # On parallel command execution this should be executed by a single process at a time.
  with FcntlBasedProcessLock(params.link_configs_lock_file,
                             enabled = params.is_parallel_execution_enabled,
                             skip_fcntl_failures = True):
    # symlink every conf-select managed configuration directory
    for name, dirs in conf_select.get_package_dirs().iteritems():
      conf_select.convert_conf_directories_to_symlinks(name, json_version, dirs)
Example no. 18
0
    def test_symlink_conversion_bad_linkto(self):
        """
    Tests that a bad enum throws an exception.
    :return:
    """
        packages = conf_select.get_package_dirs()

        # BUG FIX: the previous version raised its "expected failure"
        # Exception inside the try block, where the bare except immediately
        # swallowed it — the test could never fail. try/except/else ensures
        # the test only passes when convert_conf_directories_to_symlinks
        # itself raises on the invalid link_to value.
        try:
            conf_select.convert_conf_directories_to_symlinks(
                "hadoop",
                "2.3.0.0-1234",
                packages["hadoop"],
                link_to="INVALID")
        except Exception:
            # the invalid enum raised, as expected
            pass
        else:
            self.fail(
                "Expected failure when supplying a bad enum for link_to")
Example no. 19
0
def link_configs(struct_out_file):
    """
    Link configuration directories via conf_select.

    Only applies on a fresh install of BigInsights-4.1 and higher; bails out
    early when the stack is older or when no version is present in the
    structured output file.
    """
    # Older stacks do not support conf symlinking at all.
    if not Script.is_stack_greater_or_equal("4.1"):
        Logger.info("Can only link configs for BigInsights-4.1 and higher.")
        return

    version = load_version(struct_out_file)
    if not version:
        Logger.info(
            "Could not load 'version' from {0}".format(struct_out_file))
        return

    package_dirs = conf_select.get_package_dirs()
    for package_name, directories in package_dirs.iteritems():
        conf_select.convert_conf_directories_to_symlinks(package_name, version, directories)
Esempio n. 20
0
    def _fix_default_links(self, package_name, component_name):
        """
        Repair conf symlinks left in a bad state by a prior Ambari version.

        If a prior version of Ambari did not correctly reverse the conf
        symlinks, then they would be put into a bad state when distributing a
        new stack. For example:

        /etc/component/conf (directory)
        <stack-root>/v1/component/conf -> /etc/component/conf

        When distributing v2, we'd detect the /etc/component/conf problems and
        would try to adjust it:
        /etc/component/conf -> <stack-root>/current/component/conf
        <stack-root>/v2/component/conf -> /etc/component/v2/0

        The problem is that v1 never gets changed (since the stack being
        distributed is v2), and we end up with a circular link:
        /etc/component/conf -> <stack-root>/current/component/conf
        <stack-root>/v1/component/conf -> /etc/component/conf

        :return: None
        """
        from resource_management.libraries.functions import stack_select

        package_dirs = conf_select.get_package_dirs()
        # Nothing to do for packages conf_select does not know about.
        if package_name not in package_dirs:
            return

        Logger.info(
            "Determining if the default conf links for {0} need to be fixed"
            .format(package_name))

        directories = package_dirs[package_name]
        Logger.info(
            "The following directories will be checked for {0}: {1}".
            format(package_name, str(directories)))

        # Only convert when a stack version existed before this install.
        stack_version = stack_select.get_stack_version_before_install(
            component_name)
        if stack_version:
            conf_select.convert_conf_directories_to_symlinks(
                package_name, stack_version, directories)
Esempio n. 21
0
  def test_downgrade_unlink_configs(self, family_mock, get_config_mock, call_mock,
                                    isdir_mock, islink_mock):
    """
    Tests downgrading from 2.3 to 2.2 to ensure that conf symlinks are removed and the backup
    directories restored.

    The mock arguments are injected by patch decorators (declared outside this
    method); they stub out OS-family detection, Script.get_config, shell calls,
    and the os.path.isdir / os.path.islink checks respectively.

    Three scenarios are exercised:
      1. 2.3 -> 2.2 (cross-version downgrade): symlinks must be inspected.
      2. 2.3 -> 2.3 (same-version downgrade): no symlink work expected.
      3. 2.2 -> 2.2 (same-version downgrade): no symlink work expected.
    """

    # every conf path is treated as an existing directory
    isdir_mock.return_value = True

    # required for the test to run since the Execute calls need this
    from resource_management.core.environment import Environment
    env = Environment(test_mode=True)
    with env:
      # Mock the config objects
      json_file_path = os.path.join(self.get_custom_actions_dir(), "ru_execute_tasks_namenode_prepare.json")
      self.assertTrue(os.path.isfile(json_file_path))
      with open(json_file_path, "r") as json_file:
        json_payload = json.load(json_file)

      # alter JSON for a downgrade from 2.3 to 2.2
      json_payload['commandParams']['version'] = "2.2.0.0-1234"
      json_payload['commandParams']['downgrade_from_version'] = "2.3.0.0-1234"
      json_payload['commandParams']['original_stack'] = "HDP-2.2"
      json_payload['commandParams']['target_stack'] = "HDP-2.3"
      json_payload['commandParams']['upgrade_direction'] = "downgrade"
      json_payload['hostLevelParams']['stack_version'] = "2.2"

      config_dict = ConfigDictionary(json_payload)

      family_mock.return_value = True
      get_config_mock.return_value = config_dict
      call_mock.side_effect = fake_call   # echo the command

      # test the function
      ru_execute = UpgradeSetAll()
      ru_execute.unlink_all_configs(None)

      # verify that os.path.islink was called for each conf
      self.assertTrue(islink_mock.called)
      for key, value in conf_select.get_package_dirs().iteritems():
        for directory_mapping in value:
          original_config_directory = directory_mapping['conf_dir']
          is_link_called = False

          # scan every recorded islink() invocation for this conf dir
          for call in islink_mock.call_args_list:
            call_tuple = call[0]
            if original_config_directory in call_tuple:
              is_link_called = True

          if not is_link_called:
            self.fail("os.path.islink({0}) was never called".format(original_config_directory))

      # alter JSON for a downgrade from 2.3 to 2.3
      with open(json_file_path, "r") as json_file:
        json_payload = json.load(json_file)

      json_payload['commandParams']['version'] = "2.3.0.0-1234"
      json_payload['commandParams']['downgrade_from_version'] = "2.3.0.0-5678"
      json_payload['commandParams']['original_stack'] = "HDP-2.3"
      json_payload['commandParams']['target_stack'] = "HDP-2.3"
      json_payload['commandParams']['upgrade_direction'] = "downgrade"
      json_payload['hostLevelParams']['stack_version'] = "2.3"

      # reset config
      config_dict = ConfigDictionary(json_payload)
      family_mock.return_value = True
      get_config_mock.return_value = config_dict

      # reset mock
      islink_mock.reset_mock()

      # test the function
      ru_execute = UpgradeSetAll()
      ru_execute.unlink_all_configs(None)

      # ensure it wasn't called this time
      self.assertFalse(islink_mock.called)

      with open(json_file_path, "r") as json_file:
        json_payload = json.load(json_file)

      # alter JSON for a downgrade from 2.2 to 2.2
      json_payload['commandParams']['version'] = "2.2.0.0-1234"
      json_payload['commandParams']['downgrade_from_version'] = "2.2.0.0-5678"
      json_payload['commandParams']['original_stack'] = "HDP-2.2"
      json_payload['commandParams']['target_stack'] = "HDP-2.2"
      json_payload['commandParams']['upgrade_direction'] = "downgrade"
      json_payload['hostLevelParams']['stack_version'] = "2.2"

      # reset config
      config_dict = ConfigDictionary(json_payload)
      family_mock.return_value = True
      get_config_mock.return_value = config_dict

      # reset mock
      islink_mock.reset_mock()

      # test the function
      ru_execute = UpgradeSetAll()
      ru_execute.unlink_all_configs(None)

      # ensure it wasn't called this time
      self.assertFalse(islink_mock.called)
Esempio n. 22
0
  def test_hook_default_conf_select_with_error(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
    """
    Runs the after-INSTALL hook while the mocked conf_select raises for the
    "pig" package (unless ignore_errors is set), and verifies that the hook
    still issues the expected resource actions for every package: the
    hdp-select call, the core-site.xml write, per-package conf backups,
    directory deletions, and symlink creation.
    """

    # Simulate a conf_select failure for "pig" only; every other package
    # resolves to a versioned /etc/<package>/<version>/0 path.
    def mocked_conf_select(arg1, arg2, arg3, dry_run = False, ignore_errors = False):
      if arg2 == "pig" and not dry_run:
        if not ignore_errors:
          raise Exception("whoops")
        else:
          return None
      return "/etc/{0}/{1}/0".format(arg2, arg3)

    conf_select_create_mock.side_effect = mocked_conf_select
    conf_select_select_mock.side_effect = mocked_conf_select

    # load a default command JSON and pin the stack version being installed
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)

    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    json_content['hostLevelParams']['stack_version'] = "2.3"

    self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                       classname="AfterInstallHook",
                       command="hook",
                       config_dict = json_content,
                       config_overrides = self.CONFIG_OVERRIDES)


    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
      sudo = True)

    self.assertResourceCalled('XmlConfig', 'core-site.xml',
      owner = 'hdfs',
      group = 'hadoop',
      conf_dir = "/usr/hdp/current/hadoop-client/conf",
      configurations = self.getConfig()['configurations']['core-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
      only_if="ls /usr/hdp/current/hadoop-client/conf")

    self.assertResourceCalled('Directory',
                              '/etc/ambari-logsearch-logfeeder/conf',
                              mode = 0755,
                              cd_access = 'a',
                              create_parents = True)

    # every known package must first be backed up, then relinked
    package_dirs = conf_select.get_package_dirs();
    for package, dir_defs in package_dirs.iteritems():
      for dir_def in dir_defs:
        conf_dir = dir_def['conf_dir']
        conf_backup_dir = conf_dir + ".backup"
        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
            not_if = 'test -e ' + conf_backup_dir,
            sudo = True,)

      for dir_def in dir_defs:
        conf_dir = dir_def['conf_dir']
        current_dir = dir_def['current_dir']
        self.assertResourceCalled('Directory', conf_dir,
            action = ['delete'],)
        self.assertResourceCalled('Link', conf_dir,
            to = current_dir,)

      #HACK for Atlas
      if package in ["atlas", ]:
        self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R --no-clobber /etc/atlas/conf.backup/* /etc/atlas/conf',
                                  only_if = 'test -e ' + "/etc/atlas/conf")

    self.assertNoMoreResources()
Esempio n. 23
0
    def test_hook_default_conf_select(self, rmtree_mock, symlink_mock,
                                      conf_select_select_mock,
                                      conf_select_create_mock):
        """
        Runs the after-INSTALL hook with a stubbed conf_select and verifies
        the happy-path resource actions: the hdp-select set-all call, the
        core-site.xml write, and per-package conf backup / delete / symlink
        steps for every package known to conf_select.
        """
        # every package resolves to a versioned /etc/<package>/<version>/0 path
        def mocked_conf_select(arg1, arg2, arg3, dry_run=False):
            return "/etc/{0}/{1}/0".format(arg2, arg3)

        conf_select_create_mock.side_effect = mocked_conf_select

        # load a default command JSON and pin the stack version being installed
        config_file = self.get_src_folder(
        ) + "/test/python/stacks/2.0.6/configs/default.json"
        with open(config_file, "r") as f:
            json_content = json.load(f)

        version = '2.3.0.0-1234'
        json_content['commandParams']['version'] = version
        json_content['hostLevelParams']['stack_version'] = "2.3"

        self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                           classname="AfterInstallHook",
                           command="hook",
                           config_dict=json_content)

        self.assertResourceCalled(
            'Execute',
            'ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3 | tail -1`',
            only_if='ls -d /usr/hdp/2.3*')

        self.assertResourceCalled(
            'XmlConfig',
            'core-site.xml',
            owner='hdfs',
            group='hadoop',
            conf_dir="/usr/hdp/current/hadoop-client/conf",
            configurations=self.getConfig()['configurations']['core-site'],
            configuration_attributes=self.getConfig()
            ['configuration_attributes']['core-site'],
            only_if="ls /usr/hdp/current/hadoop-client/conf")

        # every known package must first be backed up, then relinked
        package_dirs = conf_select.get_package_dirs()
        for package, dir_defs in package_dirs.iteritems():
            for dir_def in dir_defs:
                conf_dir = dir_def['conf_dir']
                conf_backup_dir = conf_dir + ".backup"
                self.assertResourceCalled(
                    'Execute',
                    ('cp', '-R', '-p', conf_dir, conf_backup_dir),
                    not_if='test -e ' + conf_backup_dir,
                    sudo=True,
                )

            for dir_def in dir_defs:
                conf_dir = dir_def['conf_dir']
                current_dir = dir_def['current_dir']
                self.assertResourceCalled(
                    'Directory',
                    conf_dir,
                    action=['delete'],
                )
                self.assertResourceCalled(
                    'Link',
                    conf_dir,
                    to=current_dir,
                )

            #HACK for Atlas
            if package in [
                    "atlas",
            ]:
                self.assertResourceCalled(
                    'Execute',
                    'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R --no-clobber /etc/atlas/conf.backup/* /etc/atlas/conf',
                    only_if='test -e ' + "/etc/atlas/conf")

        self.assertNoMoreResources()
Esempio n. 24
0
    def test_hook_default_conf_select_suspended(self, rmtree_mock,
                                                symlink_mock,
                                                conf_select_select_mock,
                                                conf_select_create_mock):
        """
        Runs the after-INSTALL hook with upgrade_suspended set, verifying
        that the "hdp-select set all" step is skipped while the core-site.xml
        write (against the versioned conf dir) and the per-package backup /
        delete / symlink actions still occur.
        """
        # every package resolves to a versioned /etc/<package>/<version>/0 path
        def mocked_conf_select(arg1, arg2, arg3, dry_run=False):
            return "/etc/{0}/{1}/0".format(arg2, arg3)

        conf_select_create_mock.side_effect = mocked_conf_select

        # load a default command JSON, pin the stack version, and mark the
        # upgrade as suspended so the hook takes the suspended code path
        config_file = self.get_src_folder(
        ) + "/test/python/stacks/2.0.6/configs/default.json"
        with open(config_file, "r") as f:
            json_content = json.load(f)

        version = '2.3.0.0-1234'
        json_content['commandParams']['version'] = version
        json_content['hostLevelParams']['stack_version'] = "2.3"
        json_content['roleParams']['upgrade_suspended'] = "true"

        self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                           classname="AfterInstallHook",
                           command="hook",
                           config_dict=json_content,
                           config_overrides=self.CONFIG_OVERRIDES)

        # same assertions as test_hook_default_conf_select, but skip hdp-select set all

        self.assertResourceCalled(
            'XmlConfig',
            'core-site.xml',
            owner='hdfs',
            group='hadoop',
            conf_dir="/usr/hdp/2.3.0.0-1234/hadoop/conf",
            configurations=self.getConfig()['configurations']['core-site'],
            configuration_attributes=self.getConfig()
            ['configuration_attributes']['core-site'],
            only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")

        # every known package must be backed up, deleted, and relinked
        package_dirs = conf_select.get_package_dirs()
        for package, dir_defs in package_dirs.iteritems():
            for dir_def in dir_defs:
                conf_dir = dir_def['conf_dir']
                conf_backup_dir = conf_dir + ".backup"
                current_dir = dir_def['current_dir']
                self.assertResourceCalled(
                    'Execute',
                    ('cp', '-R', '-p', conf_dir, conf_backup_dir),
                    not_if='test -e ' + conf_backup_dir,
                    sudo=True,
                )

                self.assertResourceCalled(
                    'Directory',
                    conf_dir,
                    action=['delete'],
                )
                self.assertResourceCalled(
                    'Link',
                    conf_dir,
                    to=current_dir,
                )

        self.assertNoMoreResources()