Example #1
    def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)

        if params.version and compare_versions(
                format_stack_version(params.version), '4.1.0.0') >= 0:
            stack_select.select_packages(params.version)

        # This is extremely important since it should only be called if crossing the IOP 4.2 boundary.
        if params.version and params.upgrade_direction:
            src_version = dst_version = None
            if params.upgrade_direction == Direction.UPGRADE:
                src_version = upgrade_summary.get_source_version(
                    "KAFKA", default_version=params.version)
                dst_version = upgrade_summary.get_target_version(
                    "KAFKA", default_version=params.version)
            else:
                # These represent the original values during the UPGRADE direction
                src_version = upgrade_summary.get_target_version(
                    "KAFKA", default_version=params.version)
                dst_version = upgrade_summary.get_source_version(
                    "KAFKA", default_version=params.version)

            if (compare_versions(src_version, '4.2.0.0') < 0
                    and compare_versions(dst_version, '4.2.0.0') >= 0):
                # Upgrade from IOP 4.1 to 4.2, Calling the acl migration script requires the configs to be present.
                self.configure(env, upgrade_type=upgrade_type)
                upgrade.run_migration(env, upgrade_type)
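A note on the direction handling above: on a downgrade, the summary's source and target describe the downgrade itself, so the example reads them in reverse to recover the endpoints of the original upgrade. A minimal sketch of that swap in isolation (the helper name is hypothetical):

    def _original_upgrade_endpoints(service_name, direction, default_version):
        # During the original UPGRADE, src/dst come straight from the summary.
        if direction == Direction.UPGRADE:
            src = upgrade_summary.get_source_version(service_name, default_version=default_version)
            dst = upgrade_summary.get_target_version(service_name, default_version=default_version)
        else:
            # During a DOWNGRADE the roles are reversed: the command's "target"
            # is the version the original upgrade started from.
            src = upgrade_summary.get_target_version(service_name, default_version=default_version)
            dst = upgrade_summary.get_source_version(service_name, default_version=default_version)
        return src, dst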
Example #2
    def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)

        # grab the current version of the component
        pre_upgrade_version = stack_select.get_role_component_current_stack_version()

        if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE,
                                                  params.version):
            stack_select.select_packages(params.version)

        # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
        if params.version and params.upgrade_direction:
            src_version = dst_version = None
            if params.upgrade_direction == Direction.UPGRADE:
                src_version = upgrade_summary.get_source_version(
                    "KAFKA", default_version=params.version)
                dst_version = upgrade_summary.get_target_version(
                    "KAFKA", default_version=params.version)
            else:
                # These represent the original values during the UPGRADE direction
                src_version = upgrade_summary.get_target_version(
                    "KAFKA", default_version=params.version)
                dst_version = upgrade_summary.get_source_version(
                    "KAFKA", default_version=params.version)

            if (not check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, src_version)
                    and check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, dst_version)):
                # Calling the acl migration script requires the configs to be present.
                self.configure(env, upgrade_type=upgrade_type)
                upgrade.run_migration(env, upgrade_type)
Example #3
    def test_get_stack_feature_version_missing_params(self):
        """
        Tests that simple upgrade information can be extracted from JSON.
        :return:
        """
        command_json = TestUpgradeSummary._get_cluster_simple_upgrade_json()
        Script.config = command_json

        summary = upgrade_summary.get_upgrade_summary()
        self.assertEqual(False, summary.is_revert)
        self.assertEqual("UPGRADE", summary.direction)
        self.assertEqual("STANDARD", summary.orchestration)
        self.assertEqual("rolling_upgrade", summary.type)

        services = summary.services
        self.assertEqual("2.4.0.0-1234", services["HDFS"].source_version)
        self.assertEqual("2.5.9.9-9999", services["HDFS"].target_version)

        self.assertEqual("2.4.0.0-1234",
                         upgrade_summary.get_source_version("HDFS"))
        self.assertEqual("2.5.9.9-9999",
                         upgrade_summary.get_target_version("HDFS"))

        self.assertTrue(
            upgrade_summary.get_downgrade_from_version("HDFS") is None)
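The assertions above pin down the shape of the command JSON the summary is built from. Below is a minimal fixture consistent with those assertions; the key names are assumptions inferred from the summary fields, not taken from the actual test helper:

    # Hypothetical fixture: key names are assumed from the summary's fields
    # (is_revert, direction, orchestration, type) and per-service versions.
    def _simple_upgrade_command_json():
        return {
            "upgradeSummary": {
                "isRevert": False,
                "direction": "UPGRADE",
                "orchestration": "STANDARD",
                "type": "rolling_upgrade",
                "services": {
                    "HDFS": {
                        "sourceVersion": "2.4.0.0-1234",
                        "targetVersion": "2.5.9.9-9999"
                    }
                }
            }
        }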
Example #4
    def convert_tables(self, env):
        import params
        env.set_params(params)

        source_version = upgrade_summary.get_source_version(
            service_name="HIVE")
        target_version = upgrade_summary.get_target_version(
            service_name="HIVE")

        source_dir = format("/usr/hdp/{source_version}")
        target_dir = format("/usr/hdp/{target_version}")

        if params.security_enabled:
            hive_kinit_cmd = format(
                "{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; "
            )
            Execute(hive_kinit_cmd, user=params.hive_user)

        # in the M10 release, PreUpgradeTool was fixed to use Hive1 instead of Hive2.
        # compare_versions gives a numeric comparison; a plain string comparison
        # would mis-order versions such as "3.0.25" vs "3.0.3".
        if compare_versions(format_stack_version(target_version), '3.0.3') >= 0:
            hive_lib_dir = format("{source_dir}/hive/lib")
        else:
            hive_lib_dir = format("{source_dir}/hive2/lib")

        classpath = format(
            "{hive_lib_dir}/*:{source_dir}/hadoop/*:{source_dir}/hadoop/lib/*:{source_dir}/hadoop-mapreduce/*:{source_dir}/hadoop-mapreduce/lib/*:{source_dir}/hadoop-hdfs/*:{source_dir}/hadoop-hdfs/lib/*:{source_dir}/hadoop/etc/hadoop/:{target_dir}/hive/lib/hive-pre-upgrade.jar:{source_dir}/hive/conf/conf.server"
        )
        # Hack to avoid a Derby classpath issue: we want derby-10.10.2.0.jar first
        # on the classpath if it's available (the other Derby jars are derbyclient-10.11.1.1.jar and derbynet-10.11.1.1.jar)
        derby_jars = glob.glob(source_dir + "/hive2/lib/*derby-*.jar")
        if len(derby_jars) == 1:
            classpath = derby_jars[0] + ":" + classpath
        cmd = format(
            "{java64_home}/bin/java -Djavax.security.auth.useSubjectCredsOnly=false -cp {classpath} org.apache.hadoop.hive.upgrade.acid.PreUpgradeTool -execute"
        )
        Execute(cmd, user=params.hive_user)
Example #5
    def convert_tables(self, env):
        import params
        env.set_params(params)

        source_version = upgrade_summary.get_source_version(
            service_name="HIVE")
        target_version = upgrade_summary.get_target_version(
            service_name="HIVE")

        source_dir = format("/usr/hdp/{source_version}")
        target_dir = format("/usr/hdp/{target_version}")

        if params.security_enabled:
            hive_kinit_cmd = format(
                "{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; "
            )
            Execute(hive_kinit_cmd, user=params.hive_user)

        classpath = format(
            "{source_dir}/hive2/lib/*:{source_dir}/hadoop/*:{source_dir}/hadoop/lib/*:{source_dir}/hadoop-mapreduce/*:{source_dir}/hadoop-mapreduce/lib/*:{target_dir}/hive/lib/hive-pre-upgrade.jar:{source_dir}/hive/conf"
        )
        cmd = format(
            "{java64_home}/bin/java -Djavax.security.auth.useSubjectCredsOnly=false -cp {classpath} org.apache.hadoop.hive.upgrade.acid.PreUpgradeTool -execute"
        )
        Execute(cmd, user=params.hive_user)
Example #6
def get_current_version(service=None, use_upgrading_version_during_upgrade=True):
  """
  Get the effective version to use to copy the tarballs to.
  :param service: the service name when checking for an upgrade. Made optional for \
    external code bases that may be using this function.
  :param use_upgrading_version_during_upgrade: True, except when the RU/EU hasn't started yet.
  :return: Version, or False if an error occurred.
  """

  from resource_management.libraries.functions import upgrade_summary

  # get the version for this command
  version = stack_features.get_stack_feature_version(Script.get_config())
  if service is not None:
    version = upgrade_summary.get_target_version(service_name=service, default_version=version)

  # if there is no upgrade, then use the command's version
  if not Script.in_stack_upgrade() or use_upgrading_version_during_upgrade:
    Logger.info("Tarball version was calcuated as {0}. Use Command Version: {1}".format(
      version, use_upgrading_version_during_upgrade))

    return version

  # we're in an upgrade and we need to use an older version
  current_version = stack_select.get_role_component_current_stack_version()
  if service is not None:
    current_version = upgrade_summary.get_source_version(service_name=service, default_version=current_version)

  if current_version is None:
    Logger.warning("Unable to determine the current version of the component for this command; unable to copy the tarball")
    return False

  return current_version
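For context, a short usage sketch of get_current_version as its docstring describes it (the service name is illustrative):

    # With the default flag, an in-flight upgrade returns the command's
    # (upgrading) version; passing False asks for the version the component
    # is currently running instead.
    tarball_version = get_current_version(service="HIVE")
    running_version = get_current_version(service="HIVE",
                                          use_upgrading_version_during_upgrade=False)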
Example #7
    def update_atlas_simple_authz(self, env):
        import params
        env.set_params(params)
        if params.upgrade_direction == Direction.UPGRADE:
            orchestration = stack_select.PACKAGE_SCOPE_STANDARD
            summary = upgrade_summary.get_upgrade_summary()

            if summary is not None:
                orchestration = summary.orchestration
                if orchestration is None:
                    raise Fail(
                        "The upgrade summary does not contain an orchestration type"
                    )

                if orchestration.upper() in stack_select._PARTIAL_ORCHESTRATION_SCOPES:
                    orchestration = stack_select.PACKAGE_SCOPE_PATCH

            stack_select_packages = stack_select.get_packages(
                orchestration,
                service_name="ATLAS",
                component_name="ATLAS_SERVER")
            if stack_select_packages is None:
                raise Fail("Unable to get packages for stack-select")

            Logger.info(
                "ATLAS_SERVER component will be stack-selected to version {0} using a {1} orchestration"
                .format(params.version, orchestration.upper()))

            for stack_select_package_name in stack_select_packages:
                stack_select.select(stack_select_package_name, params.version)
            Directory(
                format('{metadata_home}/'),
                owner=params.metadata_user,
                group=params.user_group,
                recursive_ownership=True,
            )

            target_version = upgrade_summary.get_target_version('ATLAS')
            update_atlas_simple_authz_script = os.path.join(
                format('{stack_root}'), target_version, 'atlas', 'bin',
                'atlas_update_simple_auth_json.py')
            update_atlas_simple_authz_command = format(
                'source {params.conf_dir}/atlas-env.sh ; {update_atlas_simple_authz_script} {conf_dir}'
            )
            Execute(
                update_atlas_simple_authz_command,
                only_if=format("test -e {update_atlas_simple_authz_script}"),
                user=params.metadata_user)
            atlas_simple_auth_policy_file = os.path.join(
                format('{conf_dir}'), 'atlas-simple-authz-policy.json')
            File(atlas_simple_auth_policy_file,
                 group=params.user_group,
                 owner=params.metadata_user,
                 only_if=format("test -e {atlas_simple_auth_policy_file}"),
                 mode=0o644)
Example #8
    def move_tables(self, env):
        import params
        env.set_params(params)

        create_hive_hdfs_dirs()

        target_version = upgrade_summary.get_target_version(
            service_name="HIVE")

        hive_script = format("{install_dir}/bin/hive")
        cmd = format(
            "{hive_script} --config /etc/hive --service strictmanagedmigration --hiveconf hive.strict.managed.tables=true -m automatic --modifyManagedTables --oldWarehouseRoot /apps/hive/warehouse"
        )
        Execute(cmd,
                environment={'JAVA_HOME': params.java64_home},
                user=params.hdfs_user)
Example #9
    def setup_ranger_kms_database(self, env):
        import params
        env.set_params(params)

        upgrade_stack = stack_select._get_upgrade_stack()
        if upgrade_stack is None:
            raise Fail('Unable to determine the stack and stack version')

        stack_version = upgrade_stack[1]
        target_version = upgrade_summary.get_target_version(
            "RANGER_KMS", default_version=stack_version)
        Logger.info(
            format(
                'Setting Ranger KMS database schema, using version {target_version}'
            ))
        kms.setup_kms_db(stack_version=target_version)
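A note on the default_version fallback used here and in Examples #10 and #12: when the service has no entry in the upgrade summary, the call is assumed to return the supplied default (or None without one). A hedged sketch of that contract:

    # Without a default, a service missing from the summary is assumed to
    # yield None; supplying default_version guarantees a usable version for
    # the schema setup above. (Sketch of the assumed contract.)
    assert upgrade_summary.get_target_version("NOT_IN_SUMMARY") is None
    assert upgrade_summary.get_target_version(
        "NOT_IN_SUMMARY", default_version=stack_version) == stack_version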
Example #10
    def setup_ranger_java_patches(self, env):
        import params
        env.set_params(params)

        upgrade_stack = stack_select._get_upgrade_stack()
        if upgrade_stack is None:
            raise Fail('Unable to determine the stack and stack version')

        stack_version = upgrade_stack[1]

        if params.upgrade_direction == Direction.UPGRADE:
            target_version = upgrade_summary.get_target_version(
                "RANGER", default_version=stack_version)
            Logger.info(
                format(
                    'Applying Ranger java patches, using version {target_version}'
                ))

            setup_ranger_xml.setup_java_patch(stack_version=target_version)
Example #11
    def convert_tables(self, env):
        import params
        env.set_params(params)

        source_version = upgrade_summary.get_source_version(
            service_name="HIVE")
        target_version = upgrade_summary.get_target_version(
            service_name="HIVE")

        source_dir = format("/usr/hdp/{source_version}")
        target_dir = format("/usr/hdp/{target_version}")

        classpath = format(
            "{source_dir}/hive2/lib/*:{source_dir}/hadoop/*:{source_dir}/hadoop/lib/*:{source_dir}/hadoop-mapreduce/*:{source_dir}/hadoop-mapreduce/lib/*:{target_dir}/hive/lib/hive-pre-upgrade.jar:{source_dir}/hive/conf"
        )
        cmd = format(
            "{java64_home}/bin/java -cp {classpath} org.apache.hadoop.hive.upgrade.acid.PreUpgradeTool -execute"
        )
        Execute(cmd, user="******")
Example #12
    def setup_ranger_database(self, env):
        import params
        env.set_params(params)

        upgrade_stack = stack_select._get_upgrade_stack()
        if upgrade_stack is None:
            raise Fail('Unable to determine the stack and stack version')

        stack_version = upgrade_stack[1]

        if params.xml_configurations_supported and params.upgrade_direction == Direction.UPGRADE:
            target_version = upgrade_summary.get_target_version(
                "RANGER", default_version=stack_version)
            Logger.info(
                format(
                    'Setting Ranger database schema, using version {target_version}'
                ))

            from setup_ranger_xml import setup_ranger_db
            setup_ranger_db(stack_version=target_version)
Example #13
    def configure_atlas_for_upgrade(self, env):
        import params
        env.set_params(params)
        if params.upgrade_direction == Direction.UPGRADE:
            if params.stack_supports_atlas_ranger_plugin and params.enable_ranger_atlas:
                atlas_policycache_file_v1 = os.path.join(
                    '/etc', 'ranger', params.repo_name, 'policycache',
                    format('atlas_{repo_name}.json'))
                atlas_policycache_file_v1_rename = os.path.join(
                    '/etc', 'ranger', params.repo_name, 'policycache',
                    format('atlas_{repo_name}_v1.json'))
                if os.path.isfile(atlas_policycache_file_v1):
                    Execute(format(
                        'mv {atlas_policycache_file_v1} {atlas_policycache_file_v1_rename}'
                    ),
                            user=params.metadata_user,
                            logoutput=True)

            target_version = upgrade_summary.get_target_version('ATLAS')
            atlas_simple_auth_policy_file_source = os.path.join(
                format('{stack_root}'), target_version, 'etc', 'atlas',
                'conf.dist', 'atlas-simple-authz-policy.json')
            atlas_simple_auth_policy_file_target = os.path.join(
                format('{conf_dir}'), 'atlas-simple-authz-policy.json')
            Execute(('cp', '-f', atlas_simple_auth_policy_file_source,
                     atlas_simple_auth_policy_file_target),
                    not_if=format(
                        'test -e {atlas_simple_auth_policy_file_target}'),
                    only_if=format(
                        'test -e {atlas_simple_auth_policy_file_source}'),
                    sudo=True)
            File(atlas_simple_auth_policy_file_target,
                 group=params.user_group,
                 owner=params.metadata_user,
                 only_if=format(
                     "test -e {atlas_simple_auth_policy_file_target}"),
                 mode=0o644)