def execute(configurations={}, parameters={}, host_name=None):
    BASE_URL = "http://{0}:{1}/pxf/".format(host_name, PXF_PORT)
    try:
        # Get delegation token if security is enabled
        if configurations.get(CLUSTER_ENV_SECURITY, "").lower() == "true":
            # With NameNode HA, resolve the currently active NameNode;
            # otherwise use the single configured HTTP address below
            if 'dfs.nameservices' in configurations[HDFS_SITE]:
                namenode_address = get_active_namenode(
                    ConfigDictionary(configurations[HDFS_SITE]),
                    configurations[CLUSTER_ENV_SECURITY],
                    configurations[HADOOP_ENV_HDFS_USER])[1]
            else:
                namenode_address = configurations[HDFS_SITE][
                    'dfs.namenode.http-address']

            # Fetch an HDFS delegation token and send it with every PXF call
            token = _get_delegation_token(
                namenode_address, configurations[HADOOP_ENV_HDFS_USER],
                configurations[HADOOP_ENV_HDFS_USER_KEYTAB],
                configurations[HADOOP_ENV_HDFS_PRINCIPAL_NAME], None)
            commonPXFHeaders.update({"X-GP-TOKEN": token})

        if _get_pxf_protocol_version(BASE_URL).startswith("v"):
            return (RESULT_STATE_OK, ['PXF is functional'])

        message = "Unable to determine PXF version"
        logger.exception(message)
        raise Exception(message)

    except Exception as e:
        message = 'PXF is not functional on host {0}: {1}'.format(
            host_name, e)
        logger.exception(message)
        return (RESULT_STATE_WARNING, [message])
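
# The helper functions used above are not part of this example. Below is a
# minimal sketch of what _get_pxf_protocol_version could look like, assuming a
# Python 2 agent runtime and that PXF exposes a "ProtocolVersion" endpoint
# whose response contains a token such as "v14". The endpoint name, header
# values, and response format here are assumptions, not taken from the example.
import re
import urllib2

commonPXFHeaders = {
    "X-GP-SEGMENT-COUNT": "1",
    "X-GP-SEGMENT-ID": "-1",
}


def _get_pxf_protocol_version(base_url):
    # Hypothetical check: GET <base_url>ProtocolVersion and pull the first
    # "v<digits>" token out of the response body.
    url = base_url + "ProtocolVersion"
    request = urllib2.Request(url, headers=commonPXFHeaders)
    response = urllib2.urlopen(request, timeout=30).read()
    match = re.search(r"v\d+", response)
    if match:
        return match.group(0)
    raise Exception("Unable to parse PXF protocol version from: " + response)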
Example #2
is_hive_installed = default("/clusterHostInfo/hive_server_hosts",
                            None) is not None

# HDFS
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
namenode_path = default('/configurations/hdfs-site/dfs.namenode.http-address',
                        None)
# Prefer the internal nameservice id; fall back to dfs.nameservices
dfs_nameservice = default(
    '/configurations/hdfs-site/dfs.internal.nameservices', None)
if dfs_nameservice is None:
    dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices',
                              None)

# With NameNode HA, replace the static address with the active NameNode's
if dfs_nameservice:
    namenode_path = get_active_namenode(hdfs_site, security_enabled,
                                        hdfs_user)[1]
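# (Assumption: get_active_namenode returns a (namenode_id, address) pair for
# the active NameNode, so [1] selects the address, as in Example #1 above.)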

# keytabs and principals
kinit_path_local = get_kinit_path(
    default('/configurations/kerberos-env/executable_search_paths', None))
hdfs_user_keytab = default('/configurations/hadoop-env/hdfs_user_keytab', None)
hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name',
                              None)
hbase_user_keytab = default('/configurations/hbase-env/hbase_user_keytab',
                            None)
hbase_principal_name = default('/configurations/hbase-env/hbase_principal_name',
                               None)
dfs_type = default("/clusterLevelParams/dfs_type", "")
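
# default() is Ambari's tolerant lookup into the command JSON: it walks a
# '/'-separated path through nested dicts and returns the fallback when any
# segment is missing. A rough sketch of that behavior under those assumptions
# (the real helper lives in resource_management.libraries.functions.default
# and reads the dict via Script.get_config()):
def _default_sketch(config, path, default_value):
    # Empty segments are dropped, so a leading '/' is optional.
    node = config
    for key in [s for s in path.split('/') if s]:
        if not isinstance(node, dict) or key not in node:
            return default_value
        node = node[key]
    return node

# Example: a missing or misspelled key yields the fallback, not a KeyError.
_cfg = {'configurations': {'hdfs-site': {'dfs.nameservices': 'ns1'}}}
assert _default_sketch(_cfg, '/configurations/hdfs-site/dfs.nameservices',
                       None) == 'ns1'
assert _default_sketch(_cfg, '/configurations/hdfs-site/no.such.key',
                       None) is None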

# HDFSResource partial function
HdfsResource = functools.partial(HdfsResource,
                                 user=hdfs_user,