Example No. 1
def is_enabled(fs=None):
    """Return whether the given filesystem scheme is configured and usable."""
    if fs == 'hdfs':
        return has_hdfs_enabled()
    elif fs == 'adl':
        return is_adls_enabled()
    elif fs == 's3a':
        return is_s3_enabled()
    elif fs == 'abfs':
        return is_abfs_enabled()
    return False  # Unknown or missing scheme: treat as disabled
Example No. 2
    def setUp(self):
        if not is_abfs_enabled():
            raise SkipTest  # ABFS is not configured; skip this test case
        # Build an ABFS client authenticated through Azure Active Directory (v2.0 tokens)
        self.client = ABFS.from_config(
            ABFS_CLUSTERS['default'],
            ActiveDirectory.from_config(AZURE_ACCOUNTS['default'],
                                        version='v2.0'))
        # Non-superuser Django test client with access to the File Browser app
        self.c = make_logged_in_client(username='******', is_superuser=False)
        grant_access('test', 'test', 'filebrowser')
        add_to_group('test')
        self.user = User.objects.get(username="******")

        # Unique, timestamped working directory so concurrent runs do not collide
        self.test_fs = 'abfs://test' + str(int(time.time()))
        LOG.debug(self.test_fs)
        self.client.mkdir(self.test_fs)
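A setUp that creates remote state needs a matching cleanup. A minimal tearDown sketch, assuming the ABFS client exposes a recursive delete named rmtree (an assumption; substitute the client's actual method if it differs):

    def tearDown(self):
        # Best-effort removal of the timestamped directory created in setUp
        try:
            self.client.rmtree(self.test_fs)
        except Exception:
            LOG.exception('Failed to clean up %s' % self.test_fs)
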
Example No. 3
################################################################
# Register file upload handlers
# This section must go after the desktop lib modules are loaded
################################################################

# Insert our custom upload handlers
file_upload_handlers = [
    'hadoop.fs.upload.HDFSfileUploadHandler',
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
    'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]

# Cloud handlers are inserted at the front of the list so they get first
# chance at the upload stream, ahead of the HDFS and default Django handlers.
if is_s3_enabled():
    file_upload_handlers.insert(0, 'aws.s3.upload.S3FileUploadHandler')

if is_abfs_enabled():
    file_upload_handlers.insert(0, 'azure.abfs.upload.ABFSFileUploadHandler')

FILE_UPLOAD_HANDLERS = tuple(file_upload_handlers)
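Each entry in FILE_UPLOAD_HANDLERS names a subclass of Django's FileUploadHandler, and handlers earlier in the tuple see the upload stream first. A minimal sketch of the shape such a handler takes (the class name and behavior are illustrative, not Hue's actual S3/ABFS implementations):

from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers

class RemoteFSUploadHandler(FileUploadHandler):
    """Hypothetical handler that streams upload chunks to a remote filesystem."""

    def new_file(self, *args, **kwargs):
        super(RemoteFSUploadHandler, self).new_file(*args, **kwargs)
        # Open the remote destination here, then claim the upload so the
        # later memory/temporary-file handlers never run.
        raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        # Write each chunk to the remote store; returning None tells Django
        # the chunk has been consumed.
        return None

    def file_complete(self, file_size):
        # Close the remote file and return an UploadedFile-like object
        # (None is shown here only to keep the sketch short).
        return None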

############################################################

# Necessary so South does not futz with tests. Fixed in South 0.7.1
SKIP_SOUTH_TESTS = True

# Set up environment variable so Kerberos libraries look at our private
# ticket cache
os.environ['KRB5CCNAME'] = desktop.conf.KERBEROS.CCACHE_PATH.get()
if not os.getenv('SERVER_SOFTWARE'):
    os.environ['SERVER_SOFTWARE'] = 'apache'

# If Hue is configured to use a CACERTS truststore, make sure that the
Example No. 4
def config_validator(user):
  '''
  v2
  With connectors, 'hive' is now treated as a dialect, and only the list of connections
  (instances of the 'hive' connector, e.g. pointing to a Hive server in the cloud) should be tested.
  Interpreters are now tested by the Editor in libs/notebook/conf.py.

  v1
  All the configuration happens in apps/beeswax.
  '''
  from beeswax.design import hql_query
  from beeswax.server import dbms  # dbms depends on beeswax.conf; imported in-method to avoid a circular dependency

  res = []

  if has_connectors():
    return res

  try:
    try:
      if 'test' not in sys.argv:  # Avoid tests hanging
        server = dbms.get(user)
        query = hql_query("SELECT 'Hello World!';")
        handle = server.execute_and_wait(query, timeout_sec=10.0)

        if handle:
          server.fetch(handle, rows=100)
          server.close(handle)
    except StructuredThriftTransportException as e:
      if 'Error validating the login' in str(e):
        msg = 'Failed to authenticate to HiveServer2, check authentication configurations.'
        LOG.exception(msg)
        res.append((NICE_NAME, _(msg)))
      else:
        raise  # Re-raise with the original traceback intact
  except Exception as e:
    msg = "The application won't work without a running HiveServer2."
    LOG.exception(msg)
    res.append((NICE_NAME, _(msg)))

  warehouse = None  # Pre-set so the except clause below can reference it safely
  try:
    from desktop.lib.fsmanager import get_filesystem
    from aws.conf import is_enabled as is_s3_enabled
    from azure.conf import is_abfs_enabled
    import beeswax.hive_site  # In-method import, as above, to avoid circular dependencies

    warehouse = beeswax.hive_site.get_metastore_warehouse_dir()
    fs = get_filesystem()
    if fs:
      fs_scheme = fs._get_scheme(warehouse)  # Only dereference fs once we know one is configured
      if fs_scheme == 's3a':
        if is_s3_enabled():
          fs.do_as_user(user, fs.stats, warehouse)
        else:
          LOG.warn("Warehouse is in S3, but no credential available.")
      elif fs_scheme == 'abfs':
        if is_abfs_enabled():
          fs.do_as_user(user, fs.stats, warehouse)
        else:
          LOG.warn("Warehouse is in ABFS, but no credential available.")
      else:
        fs.do_as_superuser(fs.stats, warehouse)
  except Exception:
    msg = 'Failed to access Hive warehouse: %s'
    LOG.exception(msg % warehouse)
    res.append((NICE_NAME, _(msg) % warehouse))

  return res
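Note that config_validator() returns a list of (NICE_NAME, message) pairs instead of raising, so callers can aggregate findings across apps. A minimal, hypothetical call site:

# Hypothetical usage: collect and log configuration problems for a user.
for name, message in config_validator(request.user):
    LOG.warning('%s: %s' % (name, message))
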
Example No. 5
    def setUp(self):
        if not is_abfs_enabled():
            raise SkipTest  # ABFS is not configured; skip this test case
        # Variant of Example No. 2: no explicit Azure account config is passed
        # to ActiveDirectory here
        self.client = ABFS.from_config(
            ABFS_CLUSTERS['default'],
            ActiveDirectory.from_config(None, version='v2.0'))