Beispiel #1
0
# True when the cluster is Kerberos-enabled (from cluster-env).
security_enabled = config['configurations']['cluster-env']['security_enabled']

# Hosts running the Storm UI server; empty list when the component is absent.
storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", [])

if security_enabled:
  # Kerberos service principals carry a '_HOST' placeholder that must be
  # replaced with this node's lowercase hostname.
  _hostname_lowercase = config['hostname'].lower()
  _storm_principal_name = config['configurations']['storm-env']['storm_principal_name']
  storm_jaas_principal = _storm_principal_name.replace('_HOST',_hostname_lowercase)
  storm_keytab_path = config['configurations']['storm-env']['storm_keytab']

  if stack_is_hdp22_or_further:
    # HDP 2.2+ stacks carry separate Storm UI and Nimbus identities.
    storm_ui_keytab_path = config['configurations']['storm-env']['storm_ui_keytab']
    _storm_ui_jaas_principal_name = config['configurations']['storm-env']['storm_ui_principal_name']
    storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace('_HOST',_hostname_lowercase)

    # get_bare_principal presumably strips host/realm parts — TODO confirm.
    storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)

    _nimbus_principal_name = config['configurations']['storm-env']['nimbus_principal_name']
    nimbus_jaas_principal = _nimbus_principal_name.replace('_HOST', _hostname_lowercase)
    nimbus_bare_jaas_principal = get_bare_principal(_nimbus_principal_name)
    nimbus_keytab_path = config['configurations']['storm-env']['nimbus_keytab']

# Kafka JAAS principal: only resolved on secure HDP 2.2+ clusters,
# otherwise stays None.
kafka_bare_jaas_principal = None
if stack_is_hdp22_or_further:
  if security_enabled:
    # NOTE: the leading underscore is part of the property key itself.
    storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.secure.transport']
    # generate KafkaClient jaas config if kafka is kerberized
    _kafka_principal_name = default("/configurations/kafka-env/kafka_principal_name", None)
    kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name)

  else:
Beispiel #2
0
  # NOTE(review): fragment — the enclosing conditional is outside this view.
  # Ranger tagsync Kerberos identity; '_HOST' in the principal is resolved to
  # the current host (lowercased).
  if has_ranger_tagsync:
    ranger_tagsync_principal = config['configurations']['ranger-tagsync-site']['ranger.tagsync.kerberos.principal']
    tagsync_jaas_principal = ranger_tagsync_principal.replace('_HOST', current_host.lower())
    tagsync_keytab_path = config['configurations']['ranger-tagsync-site']['ranger.tagsync.kerberos.keytab']

# logic to create core-site.xml if hdfs not installed
if stack_supports_ranger_kerberos and not has_namenode:
  # Minimal core-site content: just the authentication mode.
  core_site_property = {
    'hadoop.security.authentication': 'kerberos' if security_enabled else 'simple'
  }

  # Placeholder realm; overridden from kerberos-env when security is on.
  realm = 'EXAMPLE.COM'
  if security_enabled:
    ranger_admin_principal = config['configurations']['ranger-admin-site']['ranger.admin.kerberos.principal']
    ranger_usersync_principal = config['configurations']['ranger-ugsync-site']['ranger.usersync.kerberos.principal']
    ranger_admin_bare_principal = get_bare_principal(ranger_admin_principal)
    ranger_usersync_bare_principal = get_bare_principal(ranger_usersync_principal)
    realm = config['configurations']['kerberos-env']['realm']

    # Principal -> local-user pairs used to build auth_to_local rules below.
    # NOTE(review): '******' looks like a username redacted by the source
    # this snippet was scraped from — confirm the real value.
    rule_dict = [
      {'principal': ranger_admin_bare_principal, 'user': unix_user},
      {'principal': ranger_usersync_bare_principal, 'user': '******'},
    ]

    if has_ranger_tagsync:
      ranger_tagsync_bare_principal = get_bare_principal(ranger_tagsync_principal)
      rule_dict.append({'principal': ranger_tagsync_bare_principal, 'user': '******'})
    core_site_auth_to_local_property = ''
    for item in range(len(rule_dict)):
      rule_line = 'RULE:[2:$1@$0]({0}@{1})s/.*/{2}/\n'.format(rule_dict[item]['principal'], realm, rule_dict[item]['user'])
Beispiel #3
0
    # NOTE(review): fragment — the indentation implies an enclosing
    # 'if security_enabled:' (or similar) outside this view.
    # Resolve the '_HOST' placeholder with this node's lowercase hostname.
    _hostname_lowercase = config['hostname'].lower()
    _storm_principal_name = config['configurations']['storm-env'][
        'storm_principal_name']
    storm_jaas_principal = _storm_principal_name.replace(
        '_HOST', _hostname_lowercase)
    storm_keytab_path = config['configurations']['storm-env']['storm_keytab']

    if stack_is_hdp22_or_further:
        # HDP 2.2+ stacks carry separate Storm UI and Nimbus identities.
        storm_ui_keytab_path = config['configurations']['storm-env'][
            'storm_ui_keytab']
        _storm_ui_jaas_principal_name = config['configurations']['storm-env'][
            'storm_ui_principal_name']
        storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace(
            '_HOST', _hostname_lowercase)

        # get_bare_principal presumably strips host/realm — TODO confirm.
        storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)

        _nimbus_principal_name = config['configurations']['storm-env'][
            'nimbus_principal_name']
        nimbus_jaas_principal = _nimbus_principal_name.replace(
            '_HOST', _hostname_lowercase)
        nimbus_bare_jaas_principal = get_bare_principal(_nimbus_principal_name)
        nimbus_keytab_path = config['configurations']['storm-env'][
            'nimbus_keytab']

# Kafka JAAS principal: resolved only on secure HDP 2.2+ clusters.
# NOTE(review): fragment — the body of this branch is cut off after the
# comment below in this view.
kafka_bare_jaas_principal = None
if stack_is_hdp22_or_further:
    if security_enabled:
        # The leading underscore is part of the property key itself.
        storm_thrift_transport = config['configurations']['storm-site'][
            '_storm.thrift.secure.transport']
        # generate KafkaClient jaas config if kafka is kerberized
Beispiel #4
0
# Component host lists from the cluster topology (empty when absent).
master_hosts = default('/clusterHostInfo/accumulo_master_hosts', [])
monitor_hosts = default('/clusterHostInfo/accumulo_monitor_hosts', [])
gc_hosts = default('/clusterHostInfo/accumulo_gc_hosts', [])
tracer_hosts = default('/clusterHostInfo/accumulo_tracer_hosts', [])

# security properties
accumulo_user_keytab = config['configurations']['accumulo-env'][
    'accumulo_user_keytab']
accumulo_principal_name = config['configurations']['accumulo-env'][
    'accumulo_principal_name']

# kinit properties
kinit_path_local = status_params.kinit_path_local
if security_enabled:
    # Bare principal (host/realm stripped, presumably) of the Accumulo
    # service identity — TODO confirm get_bare_principal semantics.
    bare_accumulo_principal = get_bare_principal(
        config['configurations']['accumulo-site']
        ['general.kerberos.principal'])
    # Command prefix used to obtain a ticket before running Accumulo ops.
    kinit_cmd = format(
        "{kinit_path_local} -kt {accumulo_user_keytab} {accumulo_principal_name};"
    )
else:
    # No Kerberos: no kinit needed.
    kinit_cmd = ""

#for create_hdfs_directory
hostname = status_params.hostname
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_principal_name = config['configurations']['hadoop-env'][
    'hdfs_principal_name']

hdfs_site = config['configurations']['hdfs-site']
Beispiel #5
0
    # NOTE(review): fragment — indentation implies an enclosing
    # 'if security_enabled:' (or similar) outside this view.
    # Resolve '_HOST' placeholders with this agent's lowercase hostname.
    _hostname_lowercase = config['agentLevelParams']['hostname'].lower()
    _storm_principal_name = config['configurations']['storm-env'][
        'storm_principal_name']
    storm_jaas_principal = _storm_principal_name.replace(
        '_HOST', _hostname_lowercase)
    # Ambari server principal is optional (None when not configured).
    _ambari_principal_name = default(
        '/configurations/cluster-env/ambari_principal_name', None)
    storm_keytab_path = config['configurations']['storm-env']['storm_keytab']

    storm_ui_keytab_path = config['configurations']['storm-env'][
        'storm_ui_keytab']
    _storm_ui_jaas_principal_name = config['configurations']['storm-env'][
        'storm_ui_principal_name']
    storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace(
        '_HOST', _hostname_lowercase)
    storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)
    if _ambari_principal_name:
        ambari_bare_jaas_principal = get_bare_principal(_ambari_principal_name)
    _nimbus_principal_name = config['configurations']['storm-env'][
        'nimbus_principal_name']
    nimbus_jaas_principal = _nimbus_principal_name.replace(
        '_HOST', _hostname_lowercase)
    nimbus_bare_jaas_principal = get_bare_principal(_nimbus_principal_name)
    nimbus_keytab_path = config['configurations']['storm-env']['nimbus_keytab']

    # Streamline principal only exists when Streamline (SAM) is installed
    # and its env declares one; guard with 'in' to avoid a KeyError.
    if streamline_installed and 'streamline_principal_name' in config[
            'configurations']['streamline-env']:
        _streamline_principal_name = config['configurations'][
            'streamline-env']['streamline_principal_name']
        streamline_bare_jaas_principal = get_bare_principal(
            _streamline_principal_name)
Beispiel #6
0
# Shared Hadoop client libraries under the stack root.
hadoop_lib_home = stack_root + '/hadoop/lib/share/hadoop/common'

kafka_hosts = default('/clusterHostInfo/kafka_hosts', [])
# Randomize broker order — presumably so clients on different nodes don't
# all connect to the same Kafka host first; confirm intent.
from random import shuffle
shuffle(kafka_hosts)
has_kafka = len(kafka_hosts) > 0

# Kafka Jaas configs
kafka_bare_jaas_principal = None
druid_jaas_file = format('{druid_conf_dir}/druid_jaas.conf')
# Only build the JAAS identities when Kafka is present AND kerberized.
if security_enabled and has_kafka and 'kafka_principal_name' in config[
        'configurations']['kafka-env']:
    # generate KafkaClient jaas config if kafka is kerberoized
    _kafka_principal_name = default(
        "/configurations/kafka-env/kafka_principal_name", None)
    kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name)
    # Resolve '_HOST' with this agent's lowercase hostname.
    _hostname_lowercase = config['agentLevelParams']['hostname'].lower()
    _druid_principal_name = config['configurations']['druid-common'][
        'druid.escalator.internalClientPrincipal']
    druid_jaas_principal = _druid_principal_name.replace(
        '_HOST', _hostname_lowercase)
    druid_keytab_path = config['configurations']['druid-common'][
        'druid.escalator.internalClientKeytab']

import os
import multiprocessing

# Host sizing facts: CPU count, physical RAM (page size * page count),
# and ~80% of that RAM truncated to whole GiB.
cpu_count = multiprocessing.cpu_count()
mem_bytes = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
mem_gib = int(mem_bytes / (1024**3) * 0.8)
Beispiel #7
0
# Hosts running the Storm UI server; empty list when the component is absent.
storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", [])

# ulimit settings for the storm user.
# NOTE(review): the key says 'storm_user_noproc_limit' while the variable is
# 'nproc' — possible typo in the property key; confirm against storm-env.
storm_user_nofile_limit = default('/configurations/storm-env/storm_user_nofile_limit', 128000)
storm_user_nproc_limit = default('/configurations/storm-env/storm_user_noproc_limit', 65536)

if security_enabled:
  # Resolve '_HOST' placeholders with this agent's lowercase hostname.
  _hostname_lowercase = config['agentLevelParams']['hostname'].lower()
  _storm_principal_name = config['configurations']['storm-env']['storm_principal_name']
  storm_jaas_principal = _storm_principal_name.replace('_HOST',_hostname_lowercase)
  # Ambari server principal is optional (None when not configured).
  _ambari_principal_name = default('/configurations/cluster-env/ambari_principal_name', None)
  storm_keytab_path = config['configurations']['storm-env']['storm_keytab']

  storm_ui_keytab_path = config['configurations']['storm-env']['storm_ui_keytab']
  _storm_ui_jaas_principal_name = config['configurations']['storm-env']['storm_ui_principal_name']
  storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace('_HOST',_hostname_lowercase)
  storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)
  if _ambari_principal_name:
    ambari_bare_jaas_principal = get_bare_principal(_ambari_principal_name)
  _nimbus_principal_name = config['configurations']['storm-env']['nimbus_principal_name']
  nimbus_jaas_principal = _nimbus_principal_name.replace('_HOST', _hostname_lowercase)
  nimbus_bare_jaas_principal = get_bare_principal(_nimbus_principal_name)
  nimbus_keytab_path = config['configurations']['storm-env']['nimbus_keytab']

  # Streamline principal only when SAM is installed and declares one.
  if streamline_installed and 'streamline_principal_name' in config['configurations']['streamline-env']:
    _streamline_principal_name = config['configurations']['streamline-env']['streamline_principal_name']
    streamline_bare_jaas_principal = get_bare_principal(_streamline_principal_name)

# NOTE(review): fragment — the assignment consuming _kafka_principal_name is
# cut off after the last line in this view.
kafka_bare_jaas_principal = None
if security_enabled:
    # generate KafkaClient jaas config if kafka is kerberized
    _kafka_principal_name = default("/configurations/kafka-env/kafka_principal_name", None)
Beispiel #8
0
    "/configurations/dbks-site/ranger.ks.hsm.partition.password.alias",
    "ranger.kms.hsm.partition.password")
# HSM partition password (None when the HSM integration is unconfigured).
hms_partition_passwd = default(
    "/configurations/kms-env/hsm_partition_password", None)

# kms kerberos from stack 2.5 onward
rangerkms_bare_principal = 'rangerkms'  # fallback when Kerberos is off/unset

if stack_supports_ranger_kerberos:
    if security_enabled:
        rangerkms_principal = config['configurations']['dbks-site'][
            'ranger.ks.kerberos.principal']
        rangerkms_keytab = config['configurations']['dbks-site'][
            'ranger.ks.kerberos.keytab']
        if not is_empty(rangerkms_principal) and rangerkms_principal != '':
            rangerkms_bare_principal = get_bare_principal(rangerkms_principal)
            # Resolve the '_HOST' placeholder with the KMS host.
            rangerkms_principal = rangerkms_principal.replace(
                '_HOST', kms_host.lower())
    # Runs whether or not security is enabled; uses the 'rangerkms'
    # fallback in the insecure case.
    kms_plugin_config['policy.download.auth.users'] = format(
        'keyadmin,{rangerkms_bare_principal}')

# Merge any user-supplied Ranger service config overrides.
custom_ranger_service_config = generate_ranger_service_config(
    config['configurations']['kms-properties'])
if len(custom_ranger_service_config) > 0:
    kms_plugin_config.update(custom_ranger_service_config)

# Ranger repository definition for KMS.
# NOTE(review): this dict literal is truncated in the visible snippet.
kms_ranger_plugin_repo = {
    'isEnabled': 'true',
    'configs': kms_plugin_config,
    'description': 'kms repo',
    'name': repo_name,
Beispiel #9
0
if security_enabled:
    # Resolve '_HOST' placeholders with this agent's lowercase hostname.
    _hostname_lowercase = config['agentLevelParams']['hostname'].lower()
    _flink_principal_name = config['configurations']['flink-env'][
        'flink_principal_name']
    flink_jaas_principal = _flink_principal_name.replace(
        '_HOST', _hostname_lowercase)
    # Ambari server principal is optional (None when not configured).
    _ambari_principal_name = default(
        '/configurations/cluster-env/ambari_principal_name', None)
    flink_keytab_path = config['configurations']['flink-env']['flink_keytab']
    flink_kerberos_keytab = config['configurations']['flink-site'][
        'security.kerberos.login.keytab']
    flink_kerberos_principal = config['configurations']['flink-site'][
        'security.kerberos.login.principal']

    if _ambari_principal_name:
        ambari_bare_jaas_principal = get_bare_principal(_ambari_principal_name)

jdk_location = config['ambariLevelParams']['jdk_location']
namenode_hosts = default("/clusterHostInfo/namenode_hosts", [])
has_namenode = not len(namenode_hosts) == 0

# HDFS-related params are only meaningful when a NameNode exists;
# everything defaults to None otherwise.
hdfs_user = config['configurations']['hadoop-env'][
    'hdfs_user'] if has_namenode else None
hdfs_user_keytab = config['configurations']['hadoop-env'][
    'hdfs_user_keytab'] if has_namenode else None
hdfs_principal_name = config['configurations']['hadoop-env'][
    'hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site'][
    'fs.defaultFS'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
Beispiel #10
0
    # NOTE(review): fragment — the enclosing conditional is outside this view.
    # Minimal core-site content: just the authentication mode.
    core_site_property = {
        'hadoop.security.authentication':
        'kerberos' if security_enabled else 'simple'
    }

    if security_enabled:
        # Fallbacks used when the principals are empty/unset.
        realm = 'EXAMPLE.COM'
        ranger_admin_bare_principal = 'rangeradmin'
        ranger_usersync_bare_principal = 'rangerusersync'
        ranger_tagsync_bare_principal = 'rangertagsync'

        # NOTE(review): ranger_admin_principal is read below but not assigned
        # in this visible fragment — presumably set earlier in the file.
        ranger_usersync_principal = config['configurations'][
            'ranger-ugsync-site']['ranger.usersync.kerberos.principal']
        if not is_empty(
                ranger_admin_principal) and ranger_admin_principal != '':
            ranger_admin_bare_principal = get_bare_principal(
                ranger_admin_principal)
        if not is_empty(
                ranger_usersync_principal) and ranger_usersync_principal != '':
            ranger_usersync_bare_principal = get_bare_principal(
                ranger_usersync_principal)
        realm = config['configurations']['kerberos-env']['realm']

        # Principal -> local-user pairs for auth_to_local rules.
        # NOTE(review): '******' looks like a redacted username from the
        # scrape source — confirm. List literal is truncated in this view.
        rule_dict = [
            {
                'principal': ranger_admin_bare_principal,
                'user': unix_user
            },
            {
                'principal': ranger_usersync_bare_principal,
                'user': '******'
            },
Beispiel #11
0
    "/configurations/dbks-site/ranger.ks.hsm.partition.password.alias",
    "ranger.kms.hsm.partition.password")
# HSM partition password (None when the HSM integration is unconfigured).
hms_partition_passwd = default(
    "/configurations/kms-env/hsm_partition_password", None)

# kms kerberos from stack 2.5 onward
rangerkms_bare_principal = 'rangerkms'  # fallback when Kerberos is off/unset

if stack_supports_ranger_kerberos:
    if security_enabled:
        rangerkms_principal = config['configurations']['dbks-site'][
            'ranger.ks.kerberos.principal']
        rangerkms_keytab = config['configurations']['dbks-site'][
            'ranger.ks.kerberos.keytab']
        if not is_empty(rangerkms_principal) and rangerkms_principal != '':
            rangerkms_bare_principal = get_bare_principal(rangerkms_principal)
            # Resolve the '_HOST' placeholder with the KMS host.
            rangerkms_principal = rangerkms_principal.replace(
                '_HOST', kms_host.lower())
    # Runs whether or not security is enabled; uses the 'rangerkms'
    # fallback in the insecure case.
    kms_plugin_config['policy.download.auth.users'] = format(
        'keyadmin,{rangerkms_bare_principal}')

# Merge any user-supplied Ranger service config overrides.
custom_ranger_service_config = generate_ranger_service_config(
    config['configurations']['kms-properties'])
if len(custom_ranger_service_config) > 0:
    kms_plugin_config.update(custom_ranger_service_config)

# Ranger repository definition for KMS.
# NOTE(review): this dict literal is truncated in the visible snippet.
kms_ranger_plugin_repo = {
    'isEnabled': 'true',
    'configs': kms_plugin_config,
    'description': 'kms repo',
    'name': repo_name,
Beispiel #12
0
    def recommendStormConfigurationsFromHDP25(self, configurations,
                                              clusterData, services, hosts):
        """Recommend HDP 2.5+ ``storm-site`` properties.

        - On secure clusters, substitutes the bare Storm principal into
          ``nimbus.impersonation.acl``; on insecure clusters, marks the
          impersonation properties for deletion.
        - Selects the Nimbus authorizer (Ranger vs. simple ACL) based on
          whether the Ranger Storm plugin is enabled.
        - Registers (or clears) the AMS metrics consumers.

        Mutates ``configurations`` through the put* helpers; returns None.
        """
        storm_site = self.getServicesSiteProperties(services, "storm-site")
        storm_env = self.getServicesSiteProperties(services, "storm-env")
        putStormSiteProperty = self.putProperty(configurations, "storm-site",
                                                services)
        putStormSiteAttributes = self.putPropertyAttribute(
            configurations, "storm-site")
        security_enabled = self.isSecurityEnabled(services)

        if storm_env and storm_site:
            if security_enabled:
                _storm_principal_name = storm_env[
                    'storm_principal_name'] if 'storm_principal_name' in storm_env else None
                storm_bare_jaas_principal = get_bare_principal(
                    _storm_principal_name)
                if 'nimbus.impersonation.acl' in storm_site:
                    storm_nimbus_impersonation_acl = storm_site[
                        "nimbus.impersonation.acl"]
                    # Fix: str.replace() returns a new string (str is
                    # immutable); the previous code discarded the result, so
                    # the {{storm_bare_jaas_principal}} placeholder was never
                    # actually substituted before publishing the property.
                    storm_nimbus_impersonation_acl = \
                        storm_nimbus_impersonation_acl.replace(
                            '{{storm_bare_jaas_principal}}',
                            storm_bare_jaas_principal)
                    putStormSiteProperty('nimbus.impersonation.acl',
                                         storm_nimbus_impersonation_acl)
            else:
                # Impersonation settings only make sense under Kerberos;
                # mark them for deletion otherwise.
                if 'nimbus.impersonation.acl' in storm_site:
                    putStormSiteAttributes('nimbus.impersonation.acl',
                                           'delete', 'true')
                if 'nimbus.impersonation.authorizer' in storm_site:
                    putStormSiteAttributes('nimbus.impersonation.authorizer',
                                           'delete', 'true')

        # Ranger plugin flag: prefer the in-flight recommendations, fall
        # back to the currently deployed service configuration.
        rangerPluginEnabled = ''
        if 'ranger-storm-plugin-properties' in configurations and 'ranger-storm-plugin-enabled' in configurations[
                'ranger-storm-plugin-properties']['properties']:
            rangerPluginEnabled = configurations[
                'ranger-storm-plugin-properties']['properties'][
                    'ranger-storm-plugin-enabled']
        elif 'ranger-storm-plugin-properties' in services[
                'configurations'] and 'ranger-storm-plugin-enabled' in services[
                    'configurations']['ranger-storm-plugin-properties'][
                        'properties']:
            rangerPluginEnabled = services['configurations'][
                'ranger-storm-plugin-properties']['properties'][
                    'ranger-storm-plugin-enabled']

        storm_authorizer_class = 'org.apache.storm.security.auth.authorizer.SimpleACLAuthorizer'
        ranger_authorizer_class = 'org.apache.ranger.authorization.storm.authorizer.RangerStormAuthorizer'
        # Cluster is kerberized
        if security_enabled:
            # Case-insensitive check for a 'yes' plugin flag.
            if rangerPluginEnabled and (rangerPluginEnabled.lower()
                                        == 'Yes'.lower()):
                putStormSiteProperty('nimbus.authorizer',
                                     ranger_authorizer_class)
            else:
                putStormSiteProperty('nimbus.authorizer',
                                     storm_authorizer_class)
        else:
            # No Kerberos: drop any configured authorizer.
            putStormSiteAttributes('nimbus.authorizer', 'delete', 'true')

        servicesList = [
            service["StackServices"]["service_name"]
            for service in services["services"]
        ]
        # Storm AMS integration
        if 'AMBARI_METRICS' in servicesList:
            putStormSiteProperty(
                'storm.cluster.metrics.consumer.register',
                '[{"class": "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter"}]'
            )
            putStormSiteProperty(
                'topology.metrics.consumer.register',
                '[{"class": "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsSink", '
                '"parallelism.hint": 1, '
                '"whitelist": ["kafkaOffset\\\..+/", "__complete-latency", "__process-latency", '
                '"__execute-latency", '
                '"__receive\\\.population$", "__sendqueue\\\.population$", "__execute-count", "__emit-count", '
                '"__ack-count", "__fail-count", "memory/heap\\\.usedBytes$", "memory/nonHeap\\\.usedBytes$", '
                '"GC/.+\\\.count$", "GC/.+\\\.timeMs$"]}]')
        else:
            putStormSiteProperty('storm.cluster.metrics.consumer.register',
                                 'null')
            putStormSiteProperty('topology.metrics.consumer.register', 'null')
    def recommendStreamlineConfigurations(self, configurations, clusterData,
                                          services, hosts):
        """Recommend Storm security settings needed by Streamline (SAM).

        On secure clusters with Storm installed, substitutes the bare
        Streamline principal into ``nimbus.impersonation.acl`` and ensures
        the Nimbus auto-credential plugin and credential-renewer class lists
        contain the HDFS/HBase/Hive providers.  Mutates ``configurations``.

        NOTE(review): despite its name this method delegates to
        ``recommendRangerConfigurations`` on the superclass; preserved as-is
        to avoid a behavior change — confirm against the original advisor.
        """
        super(HDF30StackAdvisor,
              self).recommendRangerConfigurations(configurations, clusterData,
                                                  services, hosts)
        servicesList = [
            service["StackServices"]["service_name"]
            for service in services["services"]
        ]
        security_enabled = self.isSecurityEnabled(services)
        if 'STORM' in servicesList and security_enabled:
            storm_site = self.getServicesSiteProperties(services, "storm-site")
            if storm_site is not None:
                putStormSiteProperty = self.putProperty(
                    configurations, "storm-site", services)
                putStormSiteAttributes = self.putPropertyAttribute(
                    configurations, "storm-site")
                storm_env = self.getServicesSiteProperties(
                    services, "storm-env")
                storm_nimbus_impersonation_acl = storm_site[
                    "nimbus.impersonation.acl"] if "nimbus.impersonation.acl" in storm_site else None
                streamline_env = self.getServicesSiteProperties(
                    services, "streamline-env")
                _streamline_principal_name = streamline_env[
                    'streamline_principal_name'] if 'streamline_principal_name' in streamline_env else None
                if _streamline_principal_name is not None and storm_nimbus_impersonation_acl is not None:
                    streamline_bare_principal = get_bare_principal(
                        _streamline_principal_name)
                    # Substitute the Streamline principal placeholder.
                    storm_nimbus_impersonation_acl = storm_nimbus_impersonation_acl.replace(
                        '{{streamline_bare_principal}}',
                        streamline_bare_principal)
                    putStormSiteProperty('nimbus.impersonation.acl',
                                         storm_nimbus_impersonation_acl)

                # Merge the required providers into any existing
                # auto-credential plugin list, or install the default list.
                storm_nimbus_autocred_plugin_classes = storm_site[
                    "nimbus.autocredential.plugins.classes"] if "nimbus.autocredential.plugins.classes" in storm_site else None
                if storm_nimbus_autocred_plugin_classes is not None:
                    new_storm_nimbus_autocred_plugin_classes = [
                        'org.apache.storm.hdfs.security.AutoHDFS',
                        'org.apache.storm.hbase.security.AutoHBase',
                        'org.apache.storm.hive.security.AutoHive'
                    ]
                    new_conf = DefaultStackAdvisor.appendToYamlString(
                        storm_nimbus_autocred_plugin_classes,
                        new_storm_nimbus_autocred_plugin_classes)

                    putStormSiteProperty(
                        "nimbus.autocredential.plugins.classes", new_conf)
                else:
                    putStormSiteProperty(
                        "nimbus.autocredential.plugins.classes",
                        "['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']"
                    )

                # Same merge for the credential-renewer list.
                storm_nimbus_credential_renewer_classes = storm_site[
                    "nimbus.credential.renewers.classes"] if "nimbus.credential.renewers.classes" in storm_site else None
                if storm_nimbus_credential_renewer_classes is not None:
                    new_storm_nimbus_credential_renewer_classes_array = [
                        'org.apache.storm.hdfs.security.AutoHDFS',
                        'org.apache.storm.hbase.security.AutoHBase',
                        'org.apache.storm.hive.security.AutoHive'
                    ]
                    new_conf = DefaultStackAdvisor.appendToYamlString(
                        storm_nimbus_credential_renewer_classes,
                        new_storm_nimbus_credential_renewer_classes_array)
                    # Fix: the merged renewer list was previously written to
                    # 'nimbus.autocredential.plugins.classes' (copy-paste),
                    # clobbering the autocred value and leaving the renewers
                    # property unset in this branch.
                    putStormSiteProperty(
                        "nimbus.credential.renewers.classes", new_conf)
                else:
                    putStormSiteProperty(
                        "nimbus.credential.renewers.classes",
                        "['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']"
                    )
                # 82800 s = 23 h — presumably to renew inside the default
                # 24 h ticket lifetime; confirm.
                putStormSiteProperty("nimbus.credential.renewers.freq.secs",
                                     "82800")
Beispiel #14
0
    def getServiceConfigurationRecommendations(self, configurations,
                                               clusterData, services, hosts):
        """Produce per-service configuration recommendations.

        - AMS present: sets the downsampler event-metric pattern.
        - Secure cluster with STORM + streamline-env: substitutes the bare
          Streamline principal into ``nimbus.impersonation.acl`` and
          populates the Nimbus auto-credential / credential-renewer class
          lists with the HDFS/HBase/Hive providers.
        - On Ambari versions other than 2.5.x: hides the
          streamline-logsearch-conf attributes.

        Mutates ``configurations`` through the put* helpers; returns None.
        """
        Logger.info(
            "Class: %s, Method: %s. Get Service Configuration Recommendations."
            % (self.__class__.__name__, inspect.stack()[0][3]))
        servicesList = [
            service["StackServices"]["service_name"]
            for service in services["services"]
        ]
        security_enabled = self.isSecurityEnabled(services)

        if 'AMBARI_METRICS' in servicesList:
            putAmsSiteProperty = self.putProperty(configurations, "ams-site")
            putAmsSiteProperty(
                'timeline.metrics.downsampler.event.metric.patterns',
                'topology\.%')

        if 'STORM' in servicesList and security_enabled:
            storm_site = self.getServicesSiteProperties(services, "storm-site")
            streamline_env = self.getServicesSiteProperties(
                services, "streamline-env")
            if storm_site is not None and streamline_env is not None:
                putStormSiteProperty = self.putProperty(
                    configurations, "storm-site", services)
                putStormSiteAttributes = self.putPropertyAttribute(
                    configurations, "storm-site")
                storm_env = self.getServicesSiteProperties(
                    services, "storm-env")
                storm_nimbus_impersonation_acl = storm_site[
                    "nimbus.impersonation.acl"] if "nimbus.impersonation.acl" in storm_site else None
                streamline_env = self.getServicesSiteProperties(
                    services, "streamline-env")
                _streamline_principal_name = streamline_env[
                    'streamline_principal_name'] if 'streamline_principal_name' in streamline_env else None

                if _streamline_principal_name is not None and storm_nimbus_impersonation_acl is not None:
                    streamline_bare_principal = get_bare_principal(
                        _streamline_principal_name)
                    # Rebuild the ACL granting the Streamline principal (and
                    # the templated Storm principal) full impersonation.
                    storm_nimbus_impersonation_acl = "{ " + streamline_bare_principal + " : {hosts: ['*'], groups: ['*']}, {{storm_bare_jaas_principal}} : {hosts: ['*'], groups: ['*']}}"
                    putStormSiteProperty('nimbus.impersonation.acl',
                                         storm_nimbus_impersonation_acl)

                storm_nimbus_autocred_plugin_classes = storm_site[
                    "nimbus.autocredential.plugins.classes"] if "nimbus.autocredential.plugins.classes" in storm_site else None

                # Here storm_nimbus_autocred_plugin_classes is resulting in none. There is no nimbus.autocredential.plugins.classes in storm-site.xml

                if storm_nimbus_autocred_plugin_classes is not None:
                    new_storm_nimbus_autocred_plugin_classes = [
                        'org.apache.storm.hdfs.security.AutoHDFS',
                        'org.apache.storm.hbase.security.AutoHBase',
                        'org.apache.storm.hive.security.AutoHive'
                    ]

                    new_conf = self.appendToYamlString(
                        storm_nimbus_autocred_plugin_classes,
                        new_storm_nimbus_autocred_plugin_classes)
                    putStormSiteProperty(
                        "nimbus.autocredential.plugins.classes", new_conf)

                else:
                    putStormSiteProperty(
                        "nimbus.autocredential.plugins.classes",
                        "['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']"
                    )

                storm_nimbus_credential_renewer_classes = storm_site[
                    "nimbus.credential.renewers.classes"] if "nimbus.credential.renewers.classes" in storm_site else None

                if storm_nimbus_credential_renewer_classes is not None:
                    new_storm_nimbus_credential_renewer_classes_array = [
                        'org.apache.storm.hdfs.security.AutoHDFS',
                        'org.apache.storm.hbase.security.AutoHBase',
                        'org.apache.storm.hive.security.AutoHive'
                    ]
                    new_conf = self.appendToYamlString(
                        storm_nimbus_credential_renewer_classes,
                        new_storm_nimbus_credential_renewer_classes_array)
                    # Fix: the merged renewer list was previously written to
                    # 'nimbus.autocredential.plugins.classes' (copy-paste),
                    # clobbering the autocred value and leaving the renewers
                    # property unset in this branch.
                    putStormSiteProperty(
                        "nimbus.credential.renewers.classes", new_conf)
                else:
                    putStormSiteProperty(
                        "nimbus.credential.renewers.classes",
                        "['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']"
                    )
                # 82800 s = 23 h — presumably to renew inside the default
                # 24 h ticket lifetime; confirm.
                putStormSiteProperty("nimbus.credential.renewers.freq.secs",
                                     "82800")

        properties = get_ambari_properties()
        ambari_version = get_ambari_version(properties)
        if not (ambari_version) or not (ambari_version.startswith('2.5')):
            putStreamlineLogSearchConfAttribute = self.putPropertyAttribute(
                configurations, "streamline-logsearch-conf")
            putStreamlineLogSearchConfAttribute('service_name', 'visible',
                                                'false')
            putStreamlineLogSearchConfAttribute('component_mappings',
                                                'visible', 'false')
            putStreamlineLogSearchConfAttribute('content', 'visible', 'false')
Beispiel #15
0
  tserver_hosts = default('/clusterHostInfo/accumulo_tserver_hosts', '/clusterHostInfo/slave_hosts')  # NOTE(review): fragment — the 'if' for this branch is outside this view
else:
  # NOTE(review): passing another '/clusterHostInfo/...' *path string* as the
  # fallback value of default() looks suspicious — a host list would be
  # expected; confirm default()'s semantics here.
  tserver_hosts = default('/clusterHostInfo/accumulo_tserver_hosts', '/clusterHostInfo/all_hosts')
# Component host lists from the cluster topology (empty when absent).
master_hosts = default('/clusterHostInfo/accumulo_master_hosts', [])
monitor_hosts = default('/clusterHostInfo/accumulo_monitor_hosts', [])
gc_hosts = default('/clusterHostInfo/accumulo_gc_hosts', [])
tracer_hosts = default('/clusterHostInfo/accumulo_tracer_hosts', [])

# security properties
accumulo_user_keytab = config['configurations']['accumulo-env']['accumulo_user_keytab']
accumulo_principal_name = config['configurations']['accumulo-env']['accumulo_principal_name']

# kinit properties
kinit_path_local = status_params.kinit_path_local
if security_enabled:
  # Bare principal (host/realm stripped, presumably) — TODO confirm
  # get_bare_principal semantics.
  bare_accumulo_principal = get_bare_principal(config['configurations']['accumulo-site']['general.kerberos.principal'])
  # Command prefix used to obtain a ticket before running Accumulo ops.
  kinit_cmd = format("{kinit_path_local} -kt {accumulo_user_keytab} {accumulo_principal_name};")
else:
  # No Kerberos: no kinit needed.
  kinit_cmd = ""

host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

#for create_hdfs_directory
hostname = status_params.hostname
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']



hdfs_site = config['configurations']['hdfs-site']