def create_credential_providers():
    """Create JCEKS credential providers for Data Analytics Studio configs.

    Ensures the configuration directory exists, then re-points the
    credential-provider path of each DAS config type at a per-config
    ``.jceks`` keystore located under the conf dir.
    """
    import params

    if not os.path.exists(params.conf_dir):
        os.makedirs(params.conf_dir)

    das_config_types = (
        'data_analytics_studio-database',
        'data_analytics_studio-properties',
        'data_analytics_studio-security-site',
    )
    for config_type in das_config_types:
        jceks_path = os.path.join(params.conf_dir, config_type + '.jceks')
        update_credential_provider_path(
            params.config['configurations'][config_type],
            config_type,
            jceks_path,
            params.data_analytics_studio_user,
            params.data_analytics_studio_group)
Example #2 (score: 0)
File: oozie.py  Project: z00251311/ambari
def oozie_server_specific(upgrade_type):
  """
  Oozie-server-only setup: clean a stale pid file, create server directories,
  extract the sharelib tarball, install the ext-2.2.zip UI library, download
  the database JAR, wire in optional Falcon / LZO support, prepare the war,
  and (on stacks with OOZIE_CREATE_HIVE_TEZ_CONFIGS) generate hive-site /
  tez-site configs for Oozie's Hive actions.

  upgrade_type -- forwarded to get_oozie_ext_zip_source_paths; may be None
                  when not upgrading.
  """
  import params
  
  # Shell test that succeeds only when a live Oozie process matches the pid
  # file; used as not_if below to skip destructive steps on a running server.
  no_op_test = as_user(format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1"), user=params.oozie_user)
  
  # Delete a stale pid file unless the server is actually running.
  File(params.pid_file,
    action="delete",
    not_if=no_op_test
  )
  
  oozie_server_directories = [format("{oozie_home}/{oozie_tmp_dir}"), params.oozie_pid_dir, params.oozie_log_dir, params.oozie_tmp_dir, params.oozie_data_dir, params.oozie_lib_dir, params.oozie_webapps_dir, params.oozie_webapps_conf_dir, params.oozie_server_dir]
  Directory( oozie_server_directories,
    owner = params.oozie_user,
    group = params.user_group,
    mode = 0755,
    create_parents = True,
    cd_access="a",
  )
  
  Directory(params.oozie_libext_dir,
            create_parents = True,
  )
  
  # The hashcode marker file plus an existing share dir indicate the sharelib
  # was already extracted, letting the expensive untar below be skipped.
  hashcode_file = format("{oozie_home}/.hashcode")
  skip_recreate_sharelib = format("test -f {hashcode_file} && test -d {oozie_home}/share")

  untar_sharelib = ('tar','-xvf',format('{oozie_home}/oozie-sharelib.tar.gz'),'-C',params.oozie_home)

  Execute( untar_sharelib,    # time-expensive
    not_if  = format("{no_op_test} || {skip_recreate_sharelib}"), 
    sudo = True,
  )

  configure_cmds = []
  # Default to /usr/share/$TARGETSTACK-oozie/ext-2.2.zip as the first path
  source_ext_zip_paths = get_oozie_ext_zip_source_paths(upgrade_type, params)
  
  # Copy the first oozie ext-2.2.zip file that is found.
  # This uses a list to handle the cases when migrating from some versions of BigInsights to HDP.
  if source_ext_zip_paths is not None:
    for source_ext_zip_path in source_ext_zip_paths:
      if os.path.isfile(source_ext_zip_path):
        configure_cmds.append(('cp', source_ext_zip_path, params.oozie_libext_dir))
        configure_cmds.append(('chown', format('{oozie_user}:{user_group}'), format('{oozie_libext_dir}/{ext_js_file}')))

        Execute(configure_cmds,
                not_if=no_op_test,
                sudo=True,
                )
        break
  
  
  Directory(params.oozie_webapps_conf_dir,
            owner = params.oozie_user,
            group = params.user_group,
            recursive_ownership = True,
            recursion_follow_links = True,
  )

  # download the database JAR
  download_database_library_if_needed()

  #falcon el extension
  if params.has_falcon_host:
    Execute(format('{sudo} cp {falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir}'),
      not_if  = no_op_test)

    Execute(format('{sudo} chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'),
      not_if  = no_op_test)

  # Install LZO packages and copy the hadoop-lzo jar into Oozie's lib dir
  # when LZO compression support is enabled.
  if params.lzo_enabled and len(params.all_lzo_packages) > 0:
    Package(params.all_lzo_packages,
            retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
            retry_count=params.agent_stack_retry_count)
    Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'),
      not_if  = no_op_test,
    )

  prepare_war(params)

  # Touch the marker file so subsequent runs skip the sharelib untar above.
  File(hashcode_file,
       mode = 0644,
  )

  if params.stack_version_formatted and check_stack_feature(StackFeature.OOZIE_CREATE_HIVE_TEZ_CONFIGS, params.stack_version_formatted):
    # Create hive-site and tez-site configs for oozie
    Directory(params.hive_conf_dir,
        create_parents = True,
        owner = params.oozie_user,
        group = params.user_group
    )
    if 'hive-site' in params.config['configurations']:
      # Re-point hive-site's credential provider at an Oozie-local jceks
      # keystore before rendering the file.
      hive_site_config = update_credential_provider_path(params.config['configurations']['hive-site'],
                                                         'hive-site',
                                                         os.path.join(params.hive_conf_dir, 'hive-site.jceks'),
                                                         params.oozie_user,
                                                         params.user_group
                                                         )
      XmlConfig("hive-site.xml",
        conf_dir=params.hive_conf_dir,
        configurations=hive_site_config,
        configuration_attributes=params.config['configuration_attributes']['hive-site'],
        owner=params.oozie_user,
        group=params.user_group,
        mode=0644
    )
    if 'tez-site' in params.config['configurations']:
      XmlConfig( "tez-site.xml",
        conf_dir = params.hive_conf_dir,
        configurations = params.config['configurations']['tez-site'],
        configuration_attributes=params.config['configuration_attributes']['tez-site'],
        owner = params.oozie_user,
        group = params.user_group,
        mode = 0664
    )

    # If Atlas is also installed, need to generate Atlas Hive hook (hive-atlas-application.properties file) in directory
    # {stack_root}/{current_version}/atlas/hook/hive/
    # Because this is a .properties file instead of an xml file, it will not be read automatically by Oozie.
    # However, should still save the file on this host so that can upload it to the Oozie Sharelib in DFS.
    if has_atlas_in_cluster():
      atlas_hook_filepath = os.path.join(params.hive_conf_dir, params.atlas_hook_filename)
      Logger.info("Has atlas in cluster, will save Atlas Hive hook into location %s" % str(atlas_hook_filepath))
      setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.oozie_user, params.user_group)

  Directory(params.oozie_server_dir,
    owner = params.oozie_user,
    group = params.user_group,
    recursive_ownership = True,  
  )
  if params.security_enabled:
    # JAAS configuration used by the ZooKeeper migrator on secure clusters.
    File(os.path.join(params.conf_dir, 'zkmigrator_jaas.conf'),
         owner=params.oozie_user,
         group=params.user_group,
         content=Template("zkmigrator_jaas.conf.j2")
         )
Example #3 (score: 0)
File: oozie.py  Project: z00251311/ambari
def oozie(is_server=False, upgrade_type=None):
  """
  Common Oozie setup shared by server and client: HDFS user dir (server
  only), conf dir, oozie-site.xml with a credential-provider rewrite,
  oozie-env.sh, ulimit config, log4j properties, adminusers.txt and the
  database connection-check JAR.

  is_server    -- when True also create the HDFS user dir and run
                  oozie_server_specific at the end.
  upgrade_type -- forwarded to oozie_server_specific; may be None.
  """
  import params

  if is_server:
    # Ensure the Oozie user's HDFS home directory exists with the right mode.
    params.HdfsResource(params.oozie_hdfs_user_dir,
                         type="directory",
                         action="create_on_execute",
                         owner=params.oozie_user,
                         mode=params.oozie_hdfs_user_mode
    )
    params.HdfsResource(None, action="execute")
  Directory(params.conf_dir,
             create_parents = True,
             owner = params.oozie_user,
             group = params.user_group
  )

  # Re-point the hadoop credential provider at a local jceks keystore so
  # secrets are not written in clear text into oozie-site.xml.
  params.oozie_site = update_credential_provider_path(params.oozie_site,
                                                      'oozie-site',
                                                      os.path.join(params.conf_dir, 'oozie-site.jceks'),
                                                      params.oozie_user,
                                                      params.user_group
                                                      )

  XmlConfig("oozie-site.xml",
    conf_dir = params.conf_dir,
    configurations = params.oozie_site,
    configuration_attributes=params.config['configuration_attributes']['oozie-site'],
    owner = params.oozie_user,
    group = params.user_group,
    mode = 0664
  )
  File(format("{conf_dir}/oozie-env.sh"),
    owner=params.oozie_user,
    content=InlineTemplate(params.oozie_env_sh_template),
    group=params.user_group,
  )

  # On some OS this folder could be not exists, so we will create it before pushing there files
  Directory(params.limits_conf_dir,
            create_parents=True,
            owner='root',
            group='root'
  )

  File(os.path.join(params.limits_conf_dir, 'oozie.conf'),
       owner='root',
       group='root',
       mode=0644,
       content=Template("oozie.conf.j2")
  )

  # Write log4j from the managed config when present; otherwise only fix
  # ownership/mode of an already existing file.
  if (params.log4j_props != None):
    File(format("{params.conf_dir}/oozie-log4j.properties"),
      mode=0644,
      group=params.user_group,
      owner=params.oozie_user,
      content=InlineTemplate(params.log4j_props)
    )
  elif (os.path.exists(format("{params.conf_dir}/oozie-log4j.properties"))):
    File(format("{params.conf_dir}/oozie-log4j.properties"),
      mode=0644,
      group=params.user_group,
      owner=params.oozie_user
    )

  # Render adminusers.txt from template on stacks that support admin users;
  # otherwise just ensure the file exists with correct ownership.
  if params.stack_version_formatted and check_stack_feature(StackFeature.OOZIE_ADMIN_USER, params.stack_version_formatted):
    File(format("{params.conf_dir}/adminusers.txt"),
      mode=0644,
      group=params.user_group,
      owner=params.oozie_user,
      content=Template('adminusers.txt.j2', oozie_admin_users=params.oozie_admin_users)
    )
  else:
    File ( format("{params.conf_dir}/adminusers.txt"),
           owner = params.oozie_user,
           group = params.user_group
    )

  # Fetch the DB connection-check JAR for the supported JDBC drivers.
  if params.jdbc_driver_name == "com.mysql.jdbc.Driver" or \
     params.jdbc_driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver" or \
     params.jdbc_driver_name == "org.postgresql.Driver" or \
     params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
    File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
      content = DownloadSource(format("{jdk_location}{check_db_connection_jar_name}")),
    )
  pass

  oozie_ownership()
  
  if is_server:
    oozie_server_specific(upgrade_type)
Example #4 (score: 0)
def setup_logsearch():
  """
  Set up the Log Search server: create log/pid/conf/keys directories,
  rewrite the logsearch-env credential-provider path to a local jceks
  keystore, render logsearch.properties and all server config/template
  files, ensure the ZK znode, and (when secured) write the JAAS file and
  register Solr roles.
  """
  import params

  Directory([params.logsearch_log_dir, params.logsearch_pid_dir],
            mode=0755,
            cd_access='a',
            owner=params.logsearch_user,
            group=params.user_group,
            create_parents=True
            )

  Directory([params.logsearch_dir, params.logsearch_server_conf, params.logsearch_config_set_dir],
            mode=0755,
            cd_access='a',
            owner=params.logsearch_user,
            group=params.user_group,
            create_parents=True,
            recursive_ownership=True
            )

  Directory(params.logsearch_server_keys_folder,
            cd_access='a',
            mode=0755,
            owner=params.logsearch_user,
            group=params.user_group)

  # Pre-create an empty log file with correct ownership.
  File(params.logsearch_log,
       mode=0644,
       owner=params.logsearch_user,
       group=params.user_group,
       content=''
       )

  # Re-point the credential provider at a local jceks keystore and record
  # its URI in logsearch.properties so secrets stay out of plain text.
  params.logsearch_env_config = update_credential_provider_path(params.logsearch_env_config,
                                                                'logsearch-env',
                                                                params.logsearch_env_jceks_file,
                                                                params.logsearch_user,
                                                                params.user_group
                                                                )
  params.logsearch_properties[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logsearch_env_jceks_file
  PropertiesFile(format("{logsearch_server_conf}/logsearch.properties"),
                 properties=params.logsearch_properties
                 )

  File(format("{logsearch_server_conf}/HadoopServiceConfig.json"),
       content=Template("HadoopServiceConfig.json.j2"),
       owner=params.logsearch_user,
       group=params.user_group
       )

  File(format("{logsearch_server_conf}/log4j.xml"),
       content=InlineTemplate(params.logsearch_app_log4j_content),
       owner=params.logsearch_user,
       group=params.user_group
       )

  File(format("{logsearch_server_conf}/logsearch-env.sh"),
       content=InlineTemplate(params.logsearch_env_content),
       mode=0755,
       owner=params.logsearch_user,
       group=params.user_group
       )

  File(format("{logsearch_server_conf}/logsearch-admin.json"),
       content=InlineTemplate(params.logsearch_admin_content),
       owner=params.logsearch_user,
       group=params.user_group
       )

  # Solr config sets for the service-log and audit-log collections.
  File(format("{logsearch_config_set_dir}/hadoop_logs/conf/solrconfig.xml"),
       content=InlineTemplate(params.logsearch_service_logs_solrconfig_content),
       owner=params.logsearch_user,
       group=params.user_group
       )

  File(format("{logsearch_config_set_dir}/audit_logs/conf/solrconfig.xml"),
       content=InlineTemplate(params.logsearch_audit_logs_solrconfig_content),
       owner=params.logsearch_user,
       group=params.user_group
       )

  if params.security_enabled:
    File(format("{logsearch_jaas_file}"),
         content=Template("logsearch_jaas.conf.j2"),
         owner=params.logsearch_user
         )
  # Config sets must be world-readable so Solr can load them.
  Execute(("chmod", "-R", "ugo+r", format("{logsearch_server_conf}/solr_configsets")),
          sudo=True
          )
  check_znode()

  # On secured internal Solr, grant the needed Solr roles to the Log Search
  # and Log Feeder kerberos principals.
  if params.security_enabled and not params.logsearch_use_external_solr:
    solr_cloud_util.add_solr_roles(params.config,
                                   roles = [params.infra_solr_role_logsearch, params.infra_solr_role_ranger_admin, params.infra_solr_role_dev],
                                   new_service_principals = [params.logsearch_kerberos_principal])
    solr_cloud_util.add_solr_roles(params.config,
                                   roles = [params.infra_solr_role_logfeeder, params.infra_solr_role_dev],
                                   new_service_principals = [params.logfeeder_kerberos_principal])
Example #5 (score: 0)
File: hive.py  Project: jtstorck/ambari
def hive(name=None):
  """
  Common Hive setup shared by all Hive components: etc dir, conf dirs,
  hive-site.xml with a credential-provider rewrite, optional Atlas hook,
  hive-env.sh, ulimit config, secure-cluster JAAS file and the DB
  connection-check JAR, followed by component-specific setup.

  name -- component being configured ('client', 'hiveserver2',
          'metastore', or None).
  """
  import params

  hive_client_conf_path = format("{stack_root}/current/{component_directory}/conf")
  # Permissions 644 for conf dir (client) files, and 600 for conf.server
  mode_identified = 0644 if params.hive_config_dir == hive_client_conf_path else 0600

  Directory(params.hive_etc_dir_prefix,
            mode=0755
  )

  # We should change configurations for client as well as for server.
  # The reason is that stale-configs are service-level, not component.
  Logger.info("Directories to fill with configs: %s" % str(params.hive_conf_dirs_list))
  for conf_dir in params.hive_conf_dirs_list:
    fill_conf_dir(conf_dir)

  # Re-point the credential provider at a local jceks keystore so secrets
  # are not written in clear text into hive-site.xml.
  params.hive_site_config = update_credential_provider_path(params.hive_site_config,
                                                     'hive-site',
                                                     os.path.join(params.hive_conf_dir, 'hive-site.jceks'),
                                                     params.hive_user,
                                                     params.user_group
                                                     )
  XmlConfig("hive-site.xml",
            conf_dir=params.hive_config_dir,
            configurations=params.hive_site_config,
            configuration_attributes=params.config['configuration_attributes']['hive-site'],
            owner=params.hive_user,
            group=params.user_group,
            mode=mode_identified)

  # Generate atlas-application.properties.xml file
  if params.enable_atlas_hook:
    atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename)
    setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
  
  File(format("{hive_config_dir}/hive-env.sh"),
       owner=params.hive_user,
       group=params.user_group,
       content=InlineTemplate(params.hive_env_sh_template),
       mode=mode_identified
  )

  # On some OS this folder could be not exists, so we will create it before pushing there files
  Directory(params.limits_conf_dir,
            create_parents = True,
            owner='root',
            group='root'
            )

  File(os.path.join(params.limits_conf_dir, 'hive.conf'),
       owner='root',
       group='root',
       mode=0644,
       content=Template("hive.conf.j2")
       )
  if params.security_enabled:
    # JAAS configuration used by the ZooKeeper migrator on secure clusters.
    File(os.path.join(params.hive_config_dir, 'zkmigrator_jaas.conf'),
         owner=params.hive_user,
         group=params.user_group,
         content=Template("zkmigrator_jaas.conf.j2")
         )

  # Fetch the DB connection-check JAR used by the agent's connectivity test.
  File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
       content = DownloadSource(format("{jdk_location}{check_db_connection_jar_name}")),
       mode = 0644,
  )

  # Component-specific setup follows the common configuration above.
  if name != "client":
    setup_non_client()
  if name == 'hiveserver2':
    setup_hiveserver2()
  if name == 'metastore':
    setup_metastore()
Example #6 (score: 0)
def hive_interactive(name=None):
    """
    Set up Hive Server Interactive (Hive2/LLAP): create HDFS warehouse and
    user dirs, copy the tez_hive2 tarball to HDFS, merge hive/hive-site with
    hive-interactive-site (and tez / hiveserver2 equivalents), convert
    hive.llap.io.memory.size from MB to bytes, handle the Atlas hook, then
    render all config and log4j/metrics files into the Hive2 conf dirs.

    name -- component name; currently unused in the body.
    """
    import params
    MB_TO_BYTES = 1048576

    # if warehouse directory is in DFS
    if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(
            params.default_fs).scheme:
        # Create Hive Metastore Warehouse Dir
        params.HdfsResource(params.hive_apps_whs_dir,
                            type="directory",
                            action="create_on_execute",
                            owner=params.hive_user,
                            group=params.user_group,
                            mode=params.hive_apps_whs_mode)
    else:
        Logger.info(
            format(
                "Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."
            ))

    # Create Hive User Dir
    params.HdfsResource(params.hive_hdfs_user_dir,
                        type="directory",
                        action="create_on_execute",
                        owner=params.hive_user,
                        mode=params.hive_hdfs_user_mode)

    # list of properties that should be excluded from the config
    # this approach is a compromise against adding a dedicated config
    # type for hive_server_interactive or needed config groups on a
    # per component basis
    exclude_list = ['hive.enforce.bucketing', 'hive.enforce.sorting']

    # List of configs to be excluded from hive2 client, but present in Hive2 server.
    exclude_list_for_hive2_client = [
        'javax.jdo.option.ConnectionPassword',
        'hadoop.security.credential.provider.path'
    ]

    # Copy Tarballs in HDFS.
    if params.stack_version_formatted_major and check_stack_feature(
            StackFeature.ROLLING_UPGRADE,
            params.stack_version_formatted_major):
        resource_created = copy_to_hdfs(
            "tez_hive2",
            params.user_group,
            params.hdfs_user,
            file_mode=params.tarballs_mode,
            skip=params.sysprep_skip_copy_tarballs_hdfs)

        if resource_created:
            params.HdfsResource(None, action="execute")

    Directory(params.hive_interactive_etc_dir_prefix, mode=0755)

    Logger.info("Directories to fill with configs: %s" %
                str(params.hive_conf_dirs_list))
    for conf_dir in params.hive_conf_dirs_list:
        fill_conf_dir(conf_dir)
    '''
  As hive2/hive-site.xml only contains the new + the changed props compared to hive/hive-site.xml,
  we need to merge hive/hive-site.xml and hive2/hive-site.xml and store it in hive2/hive-site.xml.
  '''
    merged_hive_interactive_site = {}
    merged_hive_interactive_site.update(
        params.config['configurations']['hive-site'])
    merged_hive_interactive_site.update(
        params.config['configurations']['hive-interactive-site'])
    for item in exclude_list:
        if item in merged_hive_interactive_site.keys():
            del merged_hive_interactive_site[item]
    '''
  Config 'hive.llap.io.memory.size' calculated value in stack_advisor is in MB as of now. We need to
  convert it to bytes before we write it down to config file.
  '''
    if 'hive.llap.io.memory.size' in merged_hive_interactive_site.keys():
        hive_llap_io_mem_size_in_mb = merged_hive_interactive_site.get(
            "hive.llap.io.memory.size")
        hive_llap_io_mem_size_in_bytes = long(
            hive_llap_io_mem_size_in_mb) * MB_TO_BYTES
        merged_hive_interactive_site[
            'hive.llap.io.memory.size'] = hive_llap_io_mem_size_in_bytes
        Logger.info(
            "Converted 'hive.llap.io.memory.size' value from '{0} MB' to '{1} Bytes' before writing "
            "it to config file.".format(hive_llap_io_mem_size_in_mb,
                                        hive_llap_io_mem_size_in_bytes))
    '''
  Hive2 doesn't have support for Atlas, we need to remove the Hook 'org.apache.atlas.hive.hook.HiveHook',
  which would have come in config 'hive.exec.post.hooks' during the site merge logic, if Atlas is installed.
  '''
    # Generate atlas-application.properties.xml file
    if params.enable_atlas_hook and params.stack_supports_atlas_hook_for_hive_interactive:
        Logger.info("Setup for Atlas Hive2 Hook started.")

        atlas_hook_filepath = os.path.join(
            params.hive_server_interactive_conf_dir,
            params.atlas_hook_filename)
        setup_atlas_hook(SERVICE.HIVE,
                         params.hive_atlas_application_properties,
                         atlas_hook_filepath, params.hive_user,
                         params.user_group)

        Logger.info("Setup for Atlas Hive2 Hook done.")
    else:
        # Required for HDP 2.5 stacks
        Logger.info(
            "Skipping setup for Atlas Hook, as it is disabled/ not supported.")
        remove_atlas_hook_if_exists(merged_hive_interactive_site)
    '''
  As tez_hive2/tez-site.xml only contains the new + the changed props compared to tez/tez-site.xml,
  we need to merge tez/tez-site.xml and tez_hive2/tez-site.xml and store it in tez_hive2/tez-site.xml.
  '''
    merged_tez_interactive_site = {}
    if 'tez-site' in params.config['configurations']:
        merged_tez_interactive_site.update(
            params.config['configurations']['tez-site'])
        Logger.info(
            "Retrieved 'tez/tez-site' for merging with 'tez_hive2/tez-interactive-site'."
        )
    else:
        Logger.error(
            "Tez's 'tez-site' couldn't be retrieved from passed-in configurations."
        )

    merged_tez_interactive_site.update(
        params.config['configurations']['tez-interactive-site'])
    XmlConfig("tez-site.xml",
              conf_dir=params.tez_interactive_config_dir,
              configurations=merged_tez_interactive_site,
              configuration_attributes=params.config['configurationAttributes']
              ['tez-interactive-site'],
              owner=params.tez_interactive_user,
              group=params.user_group,
              mode=0664)
    '''
  Merge properties from hiveserver2-interactive-site into hiveserver2-site
  '''
    merged_hiveserver2_interactive_site = {}
    if 'hiveserver2-site' in params.config['configurations']:
        merged_hiveserver2_interactive_site.update(
            params.config['configurations']['hiveserver2-site'])
        Logger.info(
            "Retrieved 'hiveserver2-site' for merging with 'hiveserver2-interactive-site'."
        )
    else:
        Logger.error(
            "'hiveserver2-site' couldn't be retrieved from passed-in configurations."
        )
    merged_hiveserver2_interactive_site.update(
        params.config['configurations']['hiveserver2-interactive-site'])

    # Create config files under /etc/hive2/conf and /etc/hive2/conf/conf.server:
    #   hive-site.xml
    #   hive-env.sh
    #   llap-daemon-log4j2.properties
    #   llap-cli-log4j2.properties
    #   hive-log4j2.properties
    #   hive-exec-log4j2.properties
    #   beeline-log4j2.properties

    hive2_conf_dirs_list = params.hive_conf_dirs_list
    hive2_client_conf_path = format(
        "{stack_root}/current/{component_directory}/conf")

    # Making copy of 'merged_hive_interactive_site' in 'merged_hive_interactive_site_copy', and deleting 'javax.jdo.option.ConnectionPassword'
    # config from there, as Hive2 client shouldn't have that config.
    merged_hive_interactive_site_copy = merged_hive_interactive_site.copy()
    for item in exclude_list_for_hive2_client:
        if item in merged_hive_interactive_site.keys():
            del merged_hive_interactive_site_copy[item]

    for conf_dir in hive2_conf_dirs_list:
        # Client conf dir gets world-readable files; server conf dir gets 600.
        mode_identified = 0644 if conf_dir == hive2_client_conf_path else 0600
        if conf_dir == hive2_client_conf_path:
            XmlConfig(
                "hive-site.xml",
                conf_dir=conf_dir,
                configurations=merged_hive_interactive_site_copy,
                configuration_attributes=params.
                config['configurationAttributes']['hive-interactive-site'],
                owner=params.hive_user,
                group=params.user_group,
                mode=0644)
        else:
            # Server-side copy keeps the credential entries and re-points the
            # provider at a local jceks keystore.
            merged_hive_interactive_site = update_credential_provider_path(
                merged_hive_interactive_site, 'hive-site',
                os.path.join(conf_dir, 'hive-site.jceks'), params.hive_user,
                params.user_group)
            XmlConfig(
                "hive-site.xml",
                conf_dir=conf_dir,
                configurations=merged_hive_interactive_site,
                configuration_attributes=params.
                config['configurationAttributes']['hive-interactive-site'],
                owner=params.hive_user,
                group=params.user_group,
                mode=0600)
        XmlConfig(
            "hiveserver2-site.xml",
            conf_dir=conf_dir,
            configurations=merged_hiveserver2_interactive_site,
            configuration_attributes=params.config['configurationAttributes']
            ['hiveserver2-interactive-site'],
            owner=params.hive_user,
            group=params.user_group,
            mode=mode_identified)

        hive_server_interactive_conf_dir = conf_dir

        File(format("{hive_server_interactive_conf_dir}/hive-env.sh"),
             owner=params.hive_user,
             group=params.user_group,
             mode=mode_identified,
             content=InlineTemplate(params.hive_interactive_env_sh_template))

        llap_daemon_log4j_filename = 'llap-daemon-log4j2.properties'
        File(format(
            "{hive_server_interactive_conf_dir}/{llap_daemon_log4j_filename}"),
             mode=mode_identified,
             group=params.user_group,
             owner=params.hive_user,
             content=InlineTemplate(params.llap_daemon_log4j))

        llap_cli_log4j2_filename = 'llap-cli-log4j2.properties'
        File(format(
            "{hive_server_interactive_conf_dir}/{llap_cli_log4j2_filename}"),
             mode=mode_identified,
             group=params.user_group,
             owner=params.hive_user,
             content=InlineTemplate(params.llap_cli_log4j2))

        hive_log4j2_filename = 'hive-log4j2.properties'
        File(format(
            "{hive_server_interactive_conf_dir}/{hive_log4j2_filename}"),
             mode=mode_identified,
             group=params.user_group,
             owner=params.hive_user,
             content=InlineTemplate(params.hive_log4j2))

        hive_exec_log4j2_filename = 'hive-exec-log4j2.properties'
        File(format(
            "{hive_server_interactive_conf_dir}/{hive_exec_log4j2_filename}"),
             mode=mode_identified,
             group=params.user_group,
             owner=params.hive_user,
             content=InlineTemplate(params.hive_exec_log4j2))

        beeline_log4j2_filename = 'beeline-log4j2.properties'
        File(format(
            "{hive_server_interactive_conf_dir}/{beeline_log4j2_filename}"),
             mode=mode_identified,
             group=params.user_group,
             owner=params.hive_user,
             content=InlineTemplate(params.beeline_log4j2))

        # Metrics sink configuration for HiveServer2 and LLAP daemons.
        File(os.path.join(hive_server_interactive_conf_dir,
                          "hadoop-metrics2-hiveserver2.properties"),
             owner=params.hive_user,
             group=params.user_group,
             mode=mode_identified,
             content=Template("hadoop-metrics2-hiveserver2.properties.j2"))

        File(format(
            "{hive_server_interactive_conf_dir}/hadoop-metrics2-llapdaemon.properties"
        ),
             owner=params.hive_user,
             group=params.user_group,
             mode=mode_identified,
             content=Template("hadoop-metrics2-llapdaemon.j2"))

        File(format(
            "{hive_server_interactive_conf_dir}/hadoop-metrics2-llaptaskscheduler.properties"
        ),
             owner=params.hive_user,
             group=params.user_group,
             mode=mode_identified,
             content=Template("hadoop-metrics2-llaptaskscheduler.j2"))

    # On some OS this folder could be not exists, so we will create it before pushing there files
    Directory(params.limits_conf_dir,
              create_parents=True,
              owner='root',
              group='root')

    File(os.path.join(params.limits_conf_dir, 'hive.conf'),
         owner='root',
         group='root',
         mode=0644,
         content=Template("hive.conf.j2"))

    # Install the JDBC connector jar when it is not already present.
    # NOTE: 'hive_intaractive_previous_jdbc_jar' spelling comes from params.
    if not os.path.exists(params.target_hive_interactive):
        jdbc_connector(params.target_hive_interactive,
                       params.hive_intaractive_previous_jdbc_jar)

    File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
         content=DownloadSource(
             format("{jdk_location}{check_db_connection_jar_name}")),
         mode=0644)
    File(params.start_hiveserver2_interactive_path,
         mode=0755,
         content=Template(format('{start_hiveserver2_interactive_script}')))

    Directory(params.hive_pid_dir,
              create_parents=True,
              cd_access='a',
              owner=params.hive_user,
              group=params.user_group,
              mode=0755)
    Directory(params.hive_log_dir,
              create_parents=True,
              cd_access='a',
              owner=params.hive_user,
              group=params.user_group,
              mode=0755)
    Directory(params.hive_interactive_var_lib,
              create_parents=True,
              cd_access='a',
              owner=params.hive_user,
              group=params.user_group,
              mode=0755)
Example #7 (score: 0)
def setup_logfeeder():
    """
    Set up the Log Feeder agent: create log/pid/checkpoint and conf
    directories, manage its credential store (jceks keystore when enabled,
    otherwise plain keystore/truststore password files), and render
    logfeeder.properties plus all env, log4j, grok and input/output config
    files; finally write the JAAS file when Solr kerberos is enabled.
    """
    import params

    Directory([
        params.logfeeder_log_dir, params.logfeeder_pid_dir,
        params.logfeeder_checkpoint_folder
    ],
              mode=0755,
              cd_access='a',
              create_parents=True)

    Directory([params.logfeeder_dir, params.logsearch_logfeeder_conf],
              mode=0755,
              cd_access='a',
              create_parents=True,
              recursive_ownership=True)

    # Pre-create an empty log file.
    File(format("{logfeeder_log_dir}/{logfeeder_log}"), mode=0644, content='')

    if params.credential_store_enabled:
        # With a credential store, secrets live in the jceks keystore and the
        # plain-text password files are removed.
        params.logfeeder_env_config = update_credential_provider_path(
            params.logfeeder_env_config, 'logfeeder-env',
            params.logfeeder_env_jceks_file, params.logsearch_user,
            params.user_group)
        params.logfeeder_properties[
            HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logfeeder_env_jceks_file
        File(format("{logsearch_logfeeder_keys_folder}/ks_pass.txt"),
             action="delete")
        File(format("{logsearch_logfeeder_keys_folder}/ts_pass.txt"),
             action="delete")
    else:
        # Without a credential store, write the keystore/truststore passwords
        # into files readable only by the Log Search user.
        Directory(params.logsearch_logfeeder_keys_folder,
                  cd_access='a',
                  mode=0755,
                  owner=params.logsearch_user,
                  group=params.user_group)

        File(format("{logsearch_logfeeder_keys_folder}/ks_pass.txt"),
             content=params.logfeeder_keystore_password,
             mode=0600,
             owner=params.logsearch_user,
             group=params.user_group)

        File(format("{logsearch_logfeeder_keys_folder}/ts_pass.txt"),
             content=params.logfeeder_truststore_password,
             mode=0600,
             owner=params.logsearch_user,
             group=params.user_group)

    PropertiesFile(format("{logsearch_logfeeder_conf}/logfeeder.properties"),
                   properties=params.logfeeder_properties)

    File(format("{logsearch_logfeeder_conf}/logfeeder-env.sh"),
         content=InlineTemplate(params.logfeeder_env_content),
         mode=0755)

    File(format("{logsearch_logfeeder_conf}/log4j.xml"),
         content=InlineTemplate(params.logfeeder_log4j_content))

    File(format("{logsearch_logfeeder_conf}/grok-patterns"),
         content=InlineTemplate(params.logfeeder_grok_patterns),
         encoding="utf-8")

    File(format("{logsearch_logfeeder_conf}/global.config.json"),
         content=Template("global.config.json.j2"))

    File(format("{logsearch_logfeeder_conf}/input.config-ambari.json"),
         content=InlineTemplate(params.logfeeder_ambari_config_content),
         encoding="utf-8")

    File(format("{logsearch_logfeeder_conf}/output.config.json"),
         content=InlineTemplate(params.logfeeder_output_config_content),
         encoding="utf-8")

    # Optional OS-level log inputs (system messages / secure log).
    if params.logfeeder_system_log_enabled:
        File(format(
            "{logsearch_logfeeder_conf}/input.config-system_messages.json"),
             content=params.logfeeder_system_messages_content)
        File(format("{logsearch_logfeeder_conf}/input.config-secure_log.json"),
             content=params.logfeeder_secure_log_content)

    if params.logsearch_solr_kerberos_enabled:
        File(format("{logfeeder_jaas_file}"),
             content=Template("logfeeder_jaas.conf.j2"))
Example #8 (score: 0)
def setup_logfeeder():
    """Lay down the Log Feeder directories and configuration files.

    Creates the log/pid/checkpoint/conf directories, moves sensitive
    logfeeder-env values into a JCEKS credential store and wires its path
    into logfeeder.properties, then renders every config file (env script,
    log4j, grok patterns, input/output configs, JAAS) from ``params``.
    """
    import params

    # Runtime directories: logs, pid file, and the input-file checkpoint store.
    Directory([
        params.logfeeder_log_dir, params.logfeeder_pid_dir,
        params.logfeeder_checkpoint_folder
    ],
              mode=0755,
              cd_access='a',
              create_parents=True)

    # Install and conf directories; recursive_ownership also fixes up any
    # files already present underneath them.
    Directory([params.logfeeder_dir, params.logsearch_logfeeder_conf],
              mode=0755,
              cd_access='a',
              create_parents=True,
              recursive_ownership=True)

    # Ensure the log file exists (empty) and is world-readable.
    File(params.logfeeder_log, mode=0644, content='')

    # Strip passwords out of logfeeder-env into a JCEKS credential store and
    # point hadoop.security.credential.provider.path at the resulting file
    # (jceks://file + absolute path).
    params.logfeeder_env_config = update_credential_provider_path(
        params.logfeeder_env_config, 'logfeeder-env',
        params.logfeeder_env_jceks_file, params.logsearch_user,
        params.user_group)
    params.logfeeder_properties[
        HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logfeeder_env_jceks_file
    PropertiesFile(format("{logsearch_logfeeder_conf}/logfeeder.properties"),
                   properties=params.logfeeder_properties)

    File(format("{logsearch_logfeeder_conf}/logfeeder-env.sh"),
         content=InlineTemplate(params.logfeeder_env_content),
         mode=0755)

    File(format("{logsearch_logfeeder_conf}/log4j.xml"),
         content=InlineTemplate(params.logfeeder_log4j_content))

    File(format("{logsearch_logfeeder_conf}/grok-patterns"),
         content=InlineTemplate(params.logfeeder_grok_patterns),
         encoding="utf-8")

    File(format("{logsearch_logfeeder_conf}/input.config-ambari.json"),
         content=InlineTemplate(params.logfeeder_ambari_config_content),
         encoding="utf-8")

    File(format("{logsearch_logfeeder_conf}/output.config.json"),
         content=InlineTemplate(params.logfeeder_output_config_content),
         encoding="utf-8")

    # Static default configs rendered from bundled Jinja (.j2) templates.
    for file_name in params.logfeeder_default_config_file_names:
        File(format("{logsearch_logfeeder_conf}/" + file_name),
             content=Template(file_name + ".j2"))

    # Drop any stale custom input config before regenerating the
    # per-service input configs below.
    File(format(
        "{logsearch_logfeeder_conf}/input.config-logfeeder-custom.json"),
         action='delete')
    # One input.config-<service>.json per service that ships logsearch
    # metadata. (iteritems: this file targets Python 2.)
    for service, pattern_content in params.logfeeder_metadata.iteritems():
        File(format("{logsearch_logfeeder_conf}/input.config-" +
                    service.replace('-logsearch-conf', '') + ".json"),
             content=InlineTemplate(pattern_content, extra_imports=[default]))

    # Optional OS-level log inputs (syslog messages / secure log).
    if params.logfeeder_system_log_enabled:
        File(format(
            "{logsearch_logfeeder_conf}/input.config-system_messages.json"),
             content=params.logfeeder_system_messages_content)
        File(format("{logsearch_logfeeder_conf}/input.config-secure_log.json"),
             content=params.logfeeder_secure_log_content)

    # JAAS config needed for Kerberized Solr access.
    if params.logsearch_solr_kerberos_enabled:
        File(format("{logfeeder_jaas_file}"),
             content=Template("logfeeder_jaas.conf.j2"))
Example #9
0
def beacon(type, action=None, upgrade_type=None):
    import params

    if action == 'config':
        create_directory(params.beacon_home_dir)
        create_directory(params.beacon_plugin_staging_dir)

        cloud_cred_provider = params.beacon_cloud_cred_provider_dir.split('://')[1]
        cloud_cred_parts = cloud_cred_provider.split('/', 1)
        create_directory("/" + cloud_cred_parts[1], cloud_cred_parts[0])

        if params.is_hive_installed:
            if not isinstance(params.hive_repl_cmrootdir, UnknownConfiguration):
                beacon_utils.create_hdfs_directory(params.hive_repl_cmrootdir,
                                                   params.hive_user,
                                                   01777)
            if not isinstance(params.hive_repl_rootdir, UnknownConfiguration):
                beacon_utils.create_hdfs_directory(params.hive_repl_rootdir,
                                                   params.hive_user,
                                                   0700)

        Directory(params.beacon_pid_dir,
                  owner=params.beacon_user,
                  create_parents=True,
                  mode=0755,
                  cd_access="a",
                  )

        Directory(params.beacon_data_dir,
                  owner=params.beacon_user,
                  create_parents=True,
                  mode=0755,
                  cd_access="a",
                  )

        Directory(params.beacon_log_dir,
                  owner=params.beacon_user,
                  create_parents=True,
                  mode=0755,
                  cd_access="a",
                  )

        Directory(params.beacon_webapp_dir,
                  owner=params.beacon_user,
                  create_parents=True)

        Directory(params.beacon_home,
                  owner=params.beacon_user,
                  create_parents=True)

        Directory(params.etc_prefix_dir,
                  mode=0755,
                  create_parents=True)

        Directory(params.beacon_conf_dir,
                  owner=params.beacon_user,
                  create_parents=True)

    environment_dictionary = {
        "HADOOP_HOME": params.hadoop_home_dir,
        "JAVA_HOME": params.java_home,
        "BEACON_LOG_DIR": params.beacon_log_dir,
        "BEACON_PID_DIR": params.beacon_pid_dir,
        "BEACON_DATA_DIR": params.beacon_data_dir,
        "BEACON_CLUSTER": params.beacon_cluster_name,
        "HADOOP_CONF": params.hadoop_conf_dir
    }
    pid = get_user_call_output.get_user_call_output(format("cat {server_pid_file}"), user=params.beacon_user,
                                                    is_checked_call=False)[1]
    process_exists = format("ls {server_pid_file} && ps -p {pid}")

    if type == 'server':
        if action == 'start':
            try:

                if params.credential_store_enabled:
                    if 'hadoop.security.credential.provider.path' in params.beacon_env:
                        credential_provider_path = params.beacon_env['hadoop.security.credential.provider.path']
                        credential_provider_src_path = credential_provider_path[len('jceks://file'):]
                        File(params.beacon_credential_provider_path[len('jceks://file'):],
                             owner=params.beacon_user,
                             group=params.user_group,
                             mode=0640,
                             content=StaticFile(credential_provider_src_path)
                             )
                    else:
                        Logger.error(
                            "hadoop.security.credential.provider.path property not found in beacon-env config-type")

                File(os.path.join(params.beacon_conf_dir, 'beacon.yml'),
                     owner='root',
                     group='root',
                     mode=0644,
                     content=Template("beacon.yml.j2")
                     )

                params.beacon_security_site = update_credential_provider_path(
                    params.beacon_security_site,
                    'beacon-security-site',
                    os.path.join(params.beacon_conf_dir, 'beacon-security-site.jceks'),
                    params.beacon_user,
                    params.user_group
                )

                XmlConfig("beacon-security-site.xml",
                          conf_dir=params.beacon_conf_dir,
                          configurations=params.beacon_security_site,
                          configuration_attributes=params.config['configuration_attributes']['beacon-security-site'],
                          owner=params.beacon_user,
                          group=params.user_group,
                          mode=0644
                          )

                Execute(format('{beacon_home}/bin/beacon setup'),
                        user=params.beacon_user,
                        path=params.hadoop_bin_dir,
                        environment=environment_dictionary
                        )

                if params.download_mysql_driver:
                    download_mysql_driver()

                Execute(format('{beacon_home}/bin/beacon start'),
                        user=params.beacon_user,
                        path=params.hadoop_bin_dir,
                        environment=environment_dictionary,
                        not_if=process_exists,
                        )

                if params.has_ranger_admin:
                    ranger_admin_url = params.config['configurations']['admin-properties']['policymgr_external_url']
                    ranger_admin_user = params.config['configurations']['ranger-env']['admin_username']
                    ranger_admin_passwd = params.config['configurations']['ranger-env']['admin_password']

                    if not params.security_enabled:
                        # Creating/Updating beacon.ranger.user with role "ROLE_SYS_ADMIN"
                        response_user = ranger_api_functions.get_user(ranger_admin_url, params.beacon_ranger_user,
                                                                      format(
                                                                          "{ranger_admin_user}:{ranger_admin_passwd}"))
                        if response_user is not None and response_user['name'] == params.beacon_ranger_user:
                            response_user_role = response_user['userRoleList'][0]
                            Logger.info(format(
                                "Beacon Ranger User with username {beacon_ranger_user} exists with role {response_user_role}"))
                            if response_user_role != "ROLE_SYS_ADMIN":
                                response_user_role = ranger_api_functions.update_user_role(ranger_admin_url,
                                                                                           params.beacon_ranger_user,
                                                                                           "ROLE_SYS_ADMIN", format(
                                        "{ranger_admin_user}:{ranger_admin_passwd}"))
                        else:
                            response_code = ranger_api_functions.create_user(ranger_admin_url,
                                                                             params.beacon_ranger_user,
                                                                             params.beacon_ranger_password,
                                                                             "ROLE_SYS_ADMIN", format(
                                    "{ranger_admin_user}:{ranger_admin_passwd}"))

                    # Updating beacon_user role depending upon cluster environment
                    count = 0
                    while count < 10:
                        beacon_user_get = ranger_api_functions.get_user(ranger_admin_url, params.beacon_user, format(
                            "{ranger_admin_user}:{ranger_admin_passwd}"))
                        if beacon_user_get is not None:
                            break
                        else:
                            time.sleep(10)  # delay for 10 seconds
                            count = count + 1
                            Logger.error(
                                format('Retrying to fetch {beacon_user} user from Ranger Admin for {count} time(s)'))

                    if beacon_user_get is not None and beacon_user_get['name'] == params.beacon_user:
                        beacon_user_get_role = beacon_user_get['userRoleList'][0]
                        if params.security_enabled and beacon_user_get_role != "ROLE_SYS_ADMIN":
                            beacon_service_user = ranger_api_functions.update_user_role(ranger_admin_url,
                                                                                        params.beacon_user,
                                                                                        "ROLE_SYS_ADMIN", format(
                                    "{ranger_admin_user}:{ranger_admin_passwd}"))
                        elif not params.security_enabled and beacon_user_get_role != "ROLE_USER":
                            beacon_service_user = ranger_api_functions.update_user_role(ranger_admin_url,
                                                                                        params.beacon_user, "ROLE_USER",
                                                                                        format(
                                                                                            "{ranger_admin_user}:{ranger_admin_passwd}"))

                    if params.ranger_hive_plugin_enabled:
                        # Get Ranger Hive default policy for resource database, table, column
                        response_policy = ranger_api_functions.get_ranger_service_default_policy(ranger_admin_url,
                                                                                                 params.service_name,
                                                                                                 format(
                                                                                                     "{ranger_admin_user}:{ranger_admin_passwd}"),
                                                                                                 ['database', 'table',
                                                                                                  'column'])
                        if response_policy:
                            user_present = ranger_api_functions.check_user_policy(response_policy, params.beacon_user)
                            if not user_present and beacon_user_get is not None and beacon_user_get[
                                'name'] == params.beacon_user:
                                policy_id = response_policy['id']
                                beacon_user_policy_item = {'groups': [], 'conditions': [],
                                                           'users': [params.beacon_user],
                                                           'accesses': [{'isAllowed': True, 'type': 'all'},
                                                                        {'isAllowed': True, 'type': 'repladmin'}],
                                                           'delegateAdmin': False}
                                policy_data = ranger_api_functions.update_policy_item(response_policy,
                                                                                      beacon_user_policy_item)
                                update_policy_response = ranger_api_functions.update_policy(ranger_admin_url, policy_id,
                                                                                            policy_data, format(
                                        "{ranger_admin_user}:{ranger_admin_passwd}"))

                        # Get Ranger Hive default policy for resource hiveservice
                        response_policy = ranger_api_functions.get_ranger_service_default_policy(ranger_admin_url,
                                                                                                 params.service_name,
                                                                                                 format(
                                                                                                     "{ranger_admin_user}:{ranger_admin_passwd}"),
                                                                                                 ['hiveservice'])
                        if response_policy:
                            user_present = ranger_api_functions.check_user_policy(response_policy, params.beacon_user)
                            if not user_present and beacon_user_get is not None and beacon_user_get[
                                'name'] == params.beacon_user:
                                # Updating beacon_user in Ranger Hive default policy for resource hiveservice
                                policy_id = response_policy['id']
                                beacon_user_policy_item = {'groups': [], 'conditions': [],
                                                           'users': [params.beacon_user],
                                                           'accesses': [{'isAllowed': True, 'type': 'serviceadmin'}],
                                                           'delegateAdmin': False}
                                policy_data = ranger_api_functions.update_policy_item(response_policy,
                                                                                      beacon_user_policy_item)
                                update_policy_response = ranger_api_functions.update_policy(ranger_admin_url, policy_id,
                                                                                            policy_data, format(
                                        "{ranger_admin_user}:{ranger_admin_passwd}"))

                    if params.ranger_atlas_plugin_enabled:
                        # Creating beacon.atlas.user with role "ROLE_USER"
                        beacon_atlas_user_response = ranger_api_functions.get_user(ranger_admin_url,
                                                                                   params.beacon_atlas_user, format(
                                "{ranger_admin_user}:{ranger_admin_passwd}"))
                        if beacon_atlas_user_response is not None and beacon_atlas_user_response[
                            'name'] == params.beacon_atlas_user:
                            beacon_atlas_user_role = beacon_atlas_user_response['userRoleList'][0]
                            Logger.info(format(
                                "Beacon Atlas User with username {beacon_atlas_user} exists with role {beacon_atlas_user_role}"))
                        else:
                            beacon_atlas_user_create_response_code = ranger_api_functions.create_user(ranger_admin_url,
                                                                                                      params.beacon_atlas_user,
                                                                                                      params.beacon_atlas_password,
                                                                                                      "ROLE_USER",
                                                                                                      format(
                                                                                                          "{ranger_admin_user}:{ranger_admin_passwd}"))

                        if params.security_enabled:
                            get_beacon_atlas_user = params.beacon_user
                        else:
                            get_beacon_atlas_user = params.beacon_atlas_user

                        if params.is_stack_3_0_or_further:
                            # Get Ranger Atlas default policy for ENTITY TYPE, ENTITY CLASSIFICATION and ENTITY ID resource
                            atlas_entity_policy_response = ranger_api_functions.get_ranger_service_default_policy(
                                ranger_admin_url, params.ranger_atlas_service_name,
                                format("{ranger_admin_user}:{ranger_admin_passwd}"),
                                ['entity', 'entity-classification', 'entity-type'])

                            if atlas_entity_policy_response:
                                beacon_atlas_user_present = ranger_api_functions.check_user_policy(
                                    atlas_entity_policy_response, get_beacon_atlas_user)
                                if not beacon_atlas_user_present:
                                    # Updating beacon atlas user in Ranger Atlas default policy for entity resource
                                    atlas_entity_policy_id = atlas_entity_policy_response['id']
                                    beacon_atlas_user_policy_item = {'groups': [], 'conditions': [],
                                                                     'users': [get_beacon_atlas_user], 'accesses': [
                                            {'type': 'entity-read', 'isAllowed': True},
                                            {'type': 'entity-create', 'isAllowed': True},
                                            {'type': 'entity-update', 'isAllowed': True}]}
                                    atlas_entity_policy_data = ranger_api_functions.update_policy_item(
                                        atlas_entity_policy_response, beacon_atlas_user_policy_item)
                                    atlas_update_entity_policy_response = ranger_api_functions.update_policy(
                                        ranger_admin_url, atlas_entity_policy_id, atlas_entity_policy_data,
                                        format("{ranger_admin_user}:{ranger_admin_passwd}"))

                            # Get Ranger Atlas default policy for ATLAS SERVICE resource
                            atlas_service_policy_response = ranger_api_functions.get_ranger_service_default_policy(
                                ranger_admin_url, params.ranger_atlas_service_name,
                                format("{ranger_admin_user}:{ranger_admin_passwd}"), ['atlas-service'])
                            if atlas_service_policy_response:
                                beacon_atlas_user_present = ranger_api_functions.check_user_policy(
                                    atlas_service_policy_response, get_beacon_atlas_user)
                                if not beacon_atlas_user_present:
                                    # Updating beacon atlas user in Ranger Atlas default policy for service resource
                                    atlas_service_policy_id = atlas_service_policy_response['id']
                                    beacon_atlas_user_policy_item = {'groups': [], 'conditions': [],
                                                                     'users': [get_beacon_atlas_user], 'accesses': [
                                            {'type': 'admin-export', 'isAllowed': True},
                                            {'type': 'admin-import', 'isAllowed': True}]}
                                    atlas_service_policy_data = ranger_api_functions.update_policy_item(
                                        atlas_service_policy_response, beacon_atlas_user_policy_item)
                                    atlas_service_policy_update_response = ranger_api_functions.update_policy(
                                        ranger_admin_url, atlas_service_policy_id, atlas_service_policy_data,
                                        format("{ranger_admin_user}:{ranger_admin_passwd}"))

                            # Get Ranger Atlas default policy for TYPE CATEGORY and TYPE resource
                            atlas_type_category_policy_response = ranger_api_functions.get_ranger_service_default_policy(
                                ranger_admin_url, params.ranger_atlas_service_name,
                                format("{ranger_admin_user}:{ranger_admin_passwd}"), ['type', 'type-category'])

                            if atlas_type_category_policy_response:
                                beacon_atlas_user_present = ranger_api_functions.check_user_policy(
                                    atlas_type_category_policy_response, get_beacon_atlas_user)
                                if not beacon_atlas_user_present:
                                    # Updating beacon atlas user in Ranger Atlas default policy for type category and type resource
                                    atlas_type_category_policy_id = atlas_type_category_policy_response['id']
                                    beacon_atlas_user_policy_item = {'groups': [], 'conditions': [],
                                                                     'users': [get_beacon_atlas_user], 'accesses': [
                                            {'type': 'type-create', 'isAllowed': True},
                                            {'type': 'type-update', 'isAllowed': True},
                                            {'type': 'type-delete', 'isAllowed': True}]}
                                    atlas_type_category_policy_data = ranger_api_functions.update_policy_item(
                                        atlas_type_category_policy_response, beacon_atlas_user_policy_item)
                                    atlas_update_type_category_policy_response = ranger_api_functions.update_policy(
                                        ranger_admin_url, atlas_type_category_policy_id,
                                        atlas_type_category_policy_data,
                                        format("{ranger_admin_user}:{ranger_admin_passwd}"))
                        else:
                            # Get Ranger Atlas default policy for ENTITY resource
                            atlas_policy_response = ranger_api_functions.get_ranger_service_default_policy(
                                ranger_admin_url, params.ranger_atlas_service_name,
                                format("{ranger_admin_user}:{ranger_admin_passwd}"), ['entity'])

                            if atlas_policy_response:
                                beacon_atlas_user_present = ranger_api_functions.check_user_policy(
                                    atlas_policy_response, get_beacon_atlas_user)
                                if not beacon_atlas_user_present:
                                    # Updating beacon atlas user in Ranger Atlas default policy for entity resource
                                    atlas_policy_id = atlas_policy_response['id']
                                    beacon_atlas_user_policy_item = {'groups': [], 'conditions': [],
                                                                     'users': [get_beacon_atlas_user],
                                                                     'accesses': [{'type': 'read', 'isAllowed': True},
                                                                                  {'type': 'create', 'isAllowed': True},
                                                                                  {'type': 'update', 'isAllowed': True},
                                                                                  {'type': 'delete', 'isAllowed': True},
                                                                                  {'type': 'all', 'isAllowed': True}]}
                                    atlas_policy_data = ranger_api_functions.update_policy_item(atlas_policy_response,
                                                                                                beacon_atlas_user_policy_item)
                                    atlas_update_policy_response = ranger_api_functions.update_policy(ranger_admin_url,
                                                                                                      atlas_policy_id,
                                                                                                      atlas_policy_data,
                                                                                                      format(
                                                                                                          "{ranger_admin_user}:{ranger_admin_passwd}"))

                            # Get Ranger Atlas default policy for OPERATION resource
                            atlas_operation_policy_response = ranger_api_functions.get_ranger_service_default_policy(
                                ranger_admin_url, params.ranger_atlas_service_name,
                                format("{ranger_admin_user}:{ranger_admin_passwd}"), ['operation'])
                            if atlas_operation_policy_response:
                                beacon_atlas_user_present = ranger_api_functions.check_user_policy(
                                    atlas_operation_policy_response, get_beacon_atlas_user)
                                if not beacon_atlas_user_present:
                                    # Updating beacon atlas user in Ranger Atlas default policy for operation resource
                                    atlas_operation_policy_id = atlas_operation_policy_response['id']
                                    beacon_atlas_user_policy_item = {'groups': [], 'conditions': [],
                                                                     'users': [get_beacon_atlas_user],
                                                                     'accesses': [{'type': 'read', 'isAllowed': True},
                                                                                  {'type': 'create', 'isAllowed': True},
                                                                                  {'type': 'update', 'isAllowed': True},
                                                                                  {'type': 'delete', 'isAllowed': True},
                                                                                  {'type': 'all', 'isAllowed': True}]}
                                    atlas_operation_policy_data = ranger_api_functions.update_policy_item(
                                        atlas_operation_policy_response, beacon_atlas_user_policy_item)
                                    atlas_operation_policy_update_response = ranger_api_functions.update_policy(
                                        ranger_admin_url, atlas_operation_policy_id, atlas_operation_policy_data,
                                        format("{ranger_admin_user}:{ranger_admin_passwd}"))
            except Exception as e:
                show_logs(params.beacon_log_dir, params.beacon_user)

        if action == 'stop':
            try:
                Execute(format('{beacon_home}/bin/beacon stop'),
                        user=params.beacon_user,
                        path=params.hadoop_bin_dir,
                        environment=environment_dictionary)
            except:
                show_logs(params.beacon_log_dir, params.beacon_user)

            File(params.server_pid_file, action='delete')
Example #10
0
File: hive.py Project: lshev/ambari
def hive(name=None):
    """Set up local and HDFS resources for a Hive component.

    Depending on *name* ('hiveserver2', 'metastore', 'client' or None),
    this declares the HDFS user/warehouse directories and copies dependency
    tarballs to HDFS (hiveserver2 only), renders the component's XML/env
    configuration files, downloads the DB-connection check jar, and creates
    local pid/log/lib directories for server components.

    Args:
        name: component being configured; selects which resources are
              created ('hiveserver2', 'metastore', 'client').
    """
    import params
    hive_client_conf_path = format("/etc/hive/conf")
    # Permissions 644 for conf dir (client) files, and 600 for conf.server
    mode_identified = 0644 if params.hive_config_dir == hive_client_conf_path else 0600
    if name == 'hiveserver2':
        # Declare the HCat HDFS user dir only when it differs from the
        # WebHCat one, so the same path is not declared twice.
        if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
            params.HdfsResource(params.hcat_hdfs_user_dir,
                                type="directory",
                                action="create_on_execute",
                                owner=params.hcat_user,
                                mode=params.hcat_hdfs_user_mode)

        params.HdfsResource(params.webhcat_hdfs_user_dir,
                            type="directory",
                            action="create_on_execute",
                            owner=params.webhcat_user,
                            mode=params.webhcat_hdfs_user_mode)

        # ****** Begin Copy Tarballs ******
        # *********************************
        #  if copy tarball to HDFS feature  supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
        copy_to_hdfs("mapreduce",
                     params.user_group,
                     params.hdfs_user,
                     skip=params.sysprep_skip_copy_tarballs_hdfs)
        copy_to_hdfs("tez",
                     params.user_group,
                     params.hdfs_user,
                     skip=params.sysprep_skip_copy_tarballs_hdfs)

        # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
        # This can use a different source and dest location to account
        copy_to_hdfs("pig",
                     params.user_group,
                     params.hdfs_user,
                     file_mode=params.tarballs_mode,
                     custom_source_file=params.pig_tar_source,
                     custom_dest_file=params.pig_tar_dest_file,
                     skip=params.sysprep_skip_copy_tarballs_hdfs)
        copy_to_hdfs("hive",
                     params.user_group,
                     params.hdfs_user,
                     file_mode=params.tarballs_mode,
                     custom_source_file=params.hive_tar_source,
                     custom_dest_file=params.hive_tar_dest_file,
                     skip=params.sysprep_skip_copy_tarballs_hdfs)

        wildcard_tarballs = ["sqoop", "hadoop_streaming"]
        for tarball_name in wildcard_tarballs:
            # NOTE(review): eval() here only reads a fixed-format params
            # attribute; getattr(params, tarball_name + "_tar_source")
            # would express the same thing without eval.
            source_file_pattern = eval("params." + tarball_name +
                                       "_tar_source")
            dest_dir = eval("params." + tarball_name + "_tar_dest_dir")

            # Skip tarballs that are not configured for this stack.
            if source_file_pattern is None or dest_dir is None:
                continue

            # The configured source may be a glob pattern; expand it,
            # otherwise treat it as a single literal path.
            source_files = glob.glob(
                source_file_pattern) if "*" in source_file_pattern else [
                    source_file_pattern
                ]
            for source_file in source_files:
                src_filename = os.path.basename(source_file)
                dest_file = os.path.join(dest_dir, src_filename)

                copy_to_hdfs(tarball_name,
                             params.user_group,
                             params.hdfs_user,
                             file_mode=params.tarballs_mode,
                             custom_source_file=source_file,
                             custom_dest_file=dest_file,
                             skip=params.sysprep_skip_copy_tarballs_hdfs)
        # ******* End Copy Tarballs *******
        # *********************************

        # if warehouse directory is in DFS
        if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(
                params.default_fs).scheme:
            # Create Hive Metastore Warehouse Dir
            params.HdfsResource(params.hive_apps_whs_dir,
                                type="directory",
                                action="create_on_execute",
                                owner=params.hive_user,
                                mode=0777)
        else:
            Logger.info(
                format(
                    "Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."
                ))

        # Create Hive User Dir
        params.HdfsResource(params.hive_hdfs_user_dir,
                            type="directory",
                            action="create_on_execute",
                            owner=params.hive_user,
                            mode=params.hive_hdfs_user_mode)

        # Declare the exec scratch dir only when it is set and not under
        # /tmp.
        if not is_empty(params.hive_exec_scratchdir) and not urlparse(
                params.hive_exec_scratchdir).path.startswith("/tmp"):
            params.HdfsResource(
                params.hive_exec_scratchdir,
                type="directory",
                action="create_on_execute",
                owner=params.hive_user,
                group=params.hdfs_user,
                mode=0777
            )  # Hive expects this dir to be writeable by everyone as it is used as a temp dir

        # Execute all HDFS resources queued above in one batch.
        params.HdfsResource(None, action="execute")

    Directory(params.hive_etc_dir_prefix, mode=0755)

    # We should change configurations for client as well as for server.
    # The reason is that stale-configs are service-level, not component.
    Logger.info("Directories to fill with configs: %s" %
                str(params.hive_conf_dirs_list))
    for conf_dir in params.hive_conf_dirs_list:
        fill_conf_dir(conf_dir)

    # Point hive-site at the hive-site.jceks credential provider before
    # rendering the file.
    params.hive_site_config = update_credential_provider_path(
        params.hive_site_config, 'hive-site',
        os.path.join(params.hive_conf_dir, 'hive-site.jceks'),
        params.hive_user, params.user_group)
    XmlConfig(
        "hive-site.xml",
        conf_dir=params.hive_config_dir,
        configurations=params.hive_site_config,
        configuration_attributes=params.config['configuration_attributes']
        ['hive-site'],
        owner=params.hive_user,
        group=params.user_group,
        mode=mode_identified)

    # Generate atlas-application.properties.xml file
    if params.enable_atlas_hook:
        atlas_hook_filepath = os.path.join(params.hive_config_dir,
                                           params.atlas_hook_filename)
        setup_atlas_hook(SERVICE.HIVE,
                         params.hive_atlas_application_properties,
                         atlas_hook_filepath, params.hive_user,
                         params.user_group)

    # Server-only config files are rendered with the stricter 600 mode.
    if name == 'hiveserver2':
        XmlConfig(
            "hiveserver2-site.xml",
            conf_dir=params.hive_server_conf_dir,
            configurations=params.config['configurations']['hiveserver2-site'],
            configuration_attributes=params.config['configuration_attributes']
            ['hiveserver2-site'],
            owner=params.hive_user,
            group=params.user_group,
            mode=0600)

    if params.hive_metastore_site_supported and name == 'metastore':
        XmlConfig(
            "hivemetastore-site.xml",
            conf_dir=params.hive_server_conf_dir,
            configurations=params.config['configurations']
            ['hivemetastore-site'],
            configuration_attributes=params.config['configuration_attributes']
            ['hivemetastore-site'],
            owner=params.hive_user,
            group=params.user_group,
            mode=0600)

    File(format("{hive_config_dir}/hive-env.sh"),
         owner=params.hive_user,
         group=params.user_group,
         mode=mode_identified,
         content=InlineTemplate(params.hive_env_sh_template))

    # On some OS this folder could be not exists, so we will create it before pushing there files
    Directory(params.limits_conf_dir,
              create_parents=True,
              owner='root',
              group='root')

    File(os.path.join(params.limits_conf_dir, 'hive.conf'),
         owner='root',
         group='root',
         mode=0644,
         content=Template("hive.conf.j2"))
    if params.security_enabled:
        File(os.path.join(params.hive_config_dir, 'zkmigrator_jaas.conf'),
             owner=params.hive_user,
             group=params.user_group,
             content=Template("zkmigrator_jaas.conf.j2"))

    # Fetch the JDBC driver jars for server components, but only when the
    # target file does not already exist.
    if name == 'metastore' or name == 'hiveserver2':
        if params.hive_jdbc_target is not None and not os.path.exists(
                params.hive_jdbc_target):
            jdbc_connector(params.hive_jdbc_target,
                           params.hive_previous_jdbc_jar)
        if params.hive2_jdbc_target is not None and not os.path.exists(
                params.hive2_jdbc_target):
            jdbc_connector(params.hive2_jdbc_target,
                           params.hive2_previous_jdbc_jar)

    # Jar the agent uses to check database connectivity; downloaded from
    # the Ambari server's JDK location URL.
    File(
        format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
        content=DownloadSource(
            format("{jdk_location}{check_db_connection_jar_name}")),
        mode=0644,
    )

    if name == 'metastore':
        File(os.path.join(params.hive_server_conf_dir,
                          "hadoop-metrics2-hivemetastore.properties"),
             owner=params.hive_user,
             group=params.user_group,
             mode=0600,
             content=Template("hadoop-metrics2-hivemetastore.properties.j2"))

        File(params.start_metastore_path,
             mode=0755,
             content=StaticFile('startMetastore.sh'))
    elif name == 'hiveserver2':
        File(params.start_hiveserver2_path,
             mode=0755,
             content=Template(format('{start_hiveserver2_script}')))

        File(os.path.join(params.hive_server_conf_dir,
                          "hadoop-metrics2-hiveserver2.properties"),
             owner=params.hive_user,
             group=params.user_group,
             mode=0600,
             content=Template("hadoop-metrics2-hiveserver2.properties.j2"))

    # Every non-client component needs writable pid/log/lib directories.
    if name != "client":
        Directory(params.hive_pid_dir,
                  create_parents=True,
                  cd_access='a',
                  owner=params.hive_user,
                  group=params.user_group,
                  mode=0755)
        Directory(params.hive_log_dir,
                  create_parents=True,
                  cd_access='a',
                  owner=params.hive_user,
                  group=params.user_group,
                  mode=0755)
        Directory(params.hive_var_lib,
                  create_parents=True,
                  cd_access='a',
                  owner=params.hive_user,
                  group=params.user_group,
                  mode=0755)