Example #1
def test_fs_configuration(fs_config, hadoop_bin_conf):
  """Test FS configuration. Returns list of (confvar, error)."""
  TEST_FILE = '/tmp/.hue_config_test.%s' % (random.randint(0, 9999999999))
  res = []

  res.extend(validate_port(fs_config.NN_THRIFT_PORT))
  res.extend(validate_port(fs_config.NN_HDFS_PORT))
  if res:
    return res

  # Check thrift plugin
  try:
    fs = HadoopFileSystem(fs_config.NN_HOST.get(),
                          fs_config.NN_THRIFT_PORT.get(),
                          fs_config.NN_HDFS_PORT.get(),
                          hadoop_bin_conf.get())

    fs.setuser(fs.superuser)
    ls = fs.listdir('/')
  except TTransport.TTransportException:
    msg = 'Failed to contact Namenode plugin at %s:%s.' % \
            (fs_config.NN_HOST.get(), fs_config.NN_THRIFT_PORT.get())
    LOG.exception(msg)
    res.append((fs_config, msg))
    return res
  except (IOError, IOException):
    msg = 'Failed to see HDFS root directory at %s. Please check HDFS configuration.' % (fs.uri,)
    LOG.exception(msg)
    res.append((fs_config, msg))
    return res

  if 'tmp' not in ls:
    return res

  # Check nn port (via upload)
  try:
    w_file = fs.open(TEST_FILE, 'w')
  except OSError as ex:
    msg = 'Failed to execute Hadoop (%s)' % (hadoop_bin_conf.get(),)
    LOG.exception(msg)
    res.append((hadoop_bin_conf, msg))
    return res
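Both variants call validate_port before touching the cluster, but that helper is not included in these snippets. Judging from how its result is used (extended into res here, and returned directly as the error list in Example #3 below), a hypothetical sketch, assuming confvars expose get(), might look like this:

def validate_port(port_confvar):
  """Hypothetical sketch: return [(confvar, error)] when the configured
  port is not an integer in the valid TCP range, otherwise []."""
  try:
    port = int(port_confvar.get())
    if not 0 < port <= 65535:
      raise ValueError(port)
  except (TypeError, ValueError):
    return [(port_confvar, 'Port should be an integer between 1 and 65535.')]
  return []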
Example #2
def test_fs_configuration(fs_config, hadoop_bin_conf):
    """Test FS configuration. Returns list of (confvar, error)."""
    TEST_FILE = '/tmp/.hue_config_test.%s' % (random.randint(0, 9999999999))
    res = []

    res.extend(validate_port(fs_config.NN_THRIFT_PORT))
    res.extend(validate_port(fs_config.NN_HDFS_PORT))
    if res:
        return res

    # Check thrift plugin
    try:
        fs = HadoopFileSystem.from_config(
            fs_config, hadoop_bin_path=hadoop_bin_conf.get())

        fs.setuser(fs.superuser)
        ls = fs.listdir('/')
    except TTransport.TTransportException:
        msg = 'Failed to contact Namenode plugin at %s:%s.' % \
                (fs_config.NN_HOST.get(), fs_config.NN_THRIFT_PORT.get())
        LOG.exception(msg)
        res.append((fs_config, msg))
        return res
    except (IOError, IOException):
        msg = 'Failed to see HDFS root directory at %s. Please check HDFS configuration.' % (
            fs.uri, )
        LOG.exception(msg)
        res.append((fs_config, msg))
        return res

    if 'tmp' not in ls:
        return res

    # Check nn port (via upload)
    try:
        w_file = fs.open(TEST_FILE, 'w')
    except OSError as ex:
        msg = 'Failed to execute Hadoop (%s)' % (hadoop_bin_conf.get(), )
        LOG.exception(msg)
        res.append((hadoop_bin_conf, msg))
        return res
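Because these checks return (confvar, error) pairs instead of raising, a caller can aggregate results from several of them. A hypothetical driver, assuming the fs_config, hadoop_bin_conf and cluster confvars come from the surrounding configuration module, LOG is the module logger used in the snippets, and test_jt_configuration is the check shown in Examples #3 and #4 below:

def run_config_checks(fs_config, hadoop_bin_conf, cluster):
    """Hypothetical sketch: collect all configuration errors for display."""
    errors = []
    errors.extend(test_fs_configuration(fs_config, hadoop_bin_conf))
    errors.extend(test_jt_configuration(cluster))
    for confvar, msg in errors:
        LOG.error('Configuration problem in %s: %s' % (confvar, msg))
    return errors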
Example #3
def test_jt_configuration(cluster):
    """Test FS configuration. Returns list of (confvar, error)."""
    err = validate_port(cluster.JT_THRIFT_PORT)
    if err:
        return err

    try:
        jt = LiveJobTracker(cluster.JT_HOST.get(), cluster.JT_THRIFT_PORT.get())
        jt.runtime_info()
    except TTransport.TTransportException:
        msg = "Failed to contact JobTracker plugin at %s:%s." % (cluster.JT_HOST.get(), cluster.JT_THRIFT_PORT.get())
        return [(cluster, msg)]
    return []
Example #4
def test_jt_configuration(cluster):
    """Test FS configuration. Returns list of (confvar, error)."""
    err = validate_port(cluster.JT_THRIFT_PORT)
    if err:
        return err

    try:
        jt = LiveJobTracker.from_conf(cluster)
        jt.runtime_info()
    except TTransport.TTransportException:
        msg = 'Failed to contact JobTracker plugin at %s:%s.' % \
              (cluster.JT_HOST.get(), cluster.JT_THRIFT_PORT.get())
        return [(cluster, msg)]
    return []
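Since the Example #3 form only reads cluster.JT_HOST and cluster.JT_THRIFT_PORT through get(), the check can be exercised against a stub rather than a full Hue configuration (the from_conf variant in Example #4 may read additional fields). A minimal hypothetical stand-in, with example values:

class _StubConfVar(object):
    """Hypothetical stand-in for a confvar: wraps a literal value."""
    def __init__(self, value):
        self.value = value

    def get(self):
        return self.value


class _StubCluster(object):
    """Hypothetical cluster config exposing only what the check reads."""
    JT_HOST = _StubConfVar('localhost')
    JT_THRIFT_PORT = _StubConfVar(9290)


# [] if a JobTracker plugin answers at that address, else [(cluster, msg)]
errors = test_jt_configuration(_StubCluster())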