Example 1
    def test_fs_permissions_admin_user(self):
        user_client = make_logged_in_client(username='******',
                                            groupname='default',
                                            recreate=True,
                                            is_superuser=True)
        user = User.objects.get(username='******')

        s3fs, adls, hdfs, abfs, gs = (
            MockFs("s3_access"), MockFs("adls_access"), MockFs(),
            MockFs("abfs_access"), MockFs("gs_access"))
        proxy_fs = ProxyFS(
            {
                's3a': wrapper(s3fs),
                'hdfs': wrapper(hdfs),
                'adl': wrapper(adls),
                'abfs': wrapper(abfs),
                'gs': wrapper(gs)
            }, 'hdfs')
        proxy_fs.setuser(user)

        f = proxy_fs._get_fs

        f('s3a://bucket')
        f('S3A://bucket/key')
        f('adl://net/key')
        f('adl:/key')
        f('abfs:/key')
        f('hdfs://path')
        f('/tmp')
        f('gs://bucket/key')
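The helpers MockFs and wrapper are used throughout these examples without being shown. Below is a rough reconstruction of the shape they appear to have, assuming ProxyFS checks a filesystem's logical permission name and expects a factory callable per scheme; this is a hypothetical sketch, not Hue's actual test code.

# Hypothetical reconstruction of the test doubles used above; only the rough
# shape assumed by the examples is sketched here.

class MockFs(object):
    """Stand-in filesystem; the optional name mimics the permission it is gated by."""

    def __init__(self, logical_name=None):
        self.user = None
        self._logical_name = logical_name   # e.g. "s3_access"; None means ungated (HDFS)

    def setuser(self, user):
        self.user = user

    def get_logical_name(self):
        return self._logical_name


def wrapper(fs):
    """Wrap a filesystem instance into the factory callable ProxyFS presumably expects."""
    return lambda *args, **kwargs: fs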
Example 2
  def test_fs_permissions_regular_user(self):
    user_client = make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
    user = User.objects.get(username='******')

    proxy_fs = ProxyFS({'s3a': MockFs(), 'hdfs': MockFs()}, 'hdfs')
    proxy_fs.setuser(user)

    f = proxy_fs._get_fs

    remove_from_group(user.username, 'has_s3')

    # No perms by default
    assert_raises(Exception, f, 's3a://bucket')
    assert_raises(Exception, f, 'S3A://bucket/key')
    f('hdfs://path')
    f('/tmp')

    try:
      # Add perm
      add_permission(user.username, 'has_s3', permname='s3_access', appname='filebrowser')

      f('s3a://bucket')
      f('S3A://bucket/key')
      f('hdfs://path')
      f('/tmp')
    finally:
      remove_from_group(user.username, 'has_s3')
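Examples 2 and 4 toggle access by granting and revoking the 's3_access' / 'adls_access' permissions, while the admin examples pass unconditionally. One plausible shape of the gate applied by _get_fs is sketched below; user_has_fs_permission is a hypothetical stand-in for Hue's group-permission lookup, not a real API.

def user_has_fs_permission(user, app, permission):
    # Hypothetical helper standing in for what add_permission/remove_from_group
    # manipulate in the tests above.
    return permission in getattr(user, 'fs_permissions', set())


def has_access(user, fs):
    # Sketch of the permission gate the tests exercise -- not Hue's actual code.
    if user.is_superuser:                     # admin users access every filesystem
        return True
    logical_name = fs.get_logical_name()      # e.g. 's3_access'; None for the ungated HDFS mock
    if logical_name is None:
        return True
    return user_has_fs_permission(user, 'filebrowser', logical_name)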
Example 3
def test__get_fs():
    make_logged_in_client(username='******',
                          groupname='default',
                          recreate=True,
                          is_superuser=False)
    user = User.objects.get(username='******')
    add_permission('test',
                   'has_s3',
                   permname='s3_access',
                   appname='filebrowser')
    add_permission('test',
                   'has_adls',
                   permname='adls_access',
                   appname='filebrowser')

    s3fs, adls, hdfs = MockFs("s3_access"), MockFs("adls_access"), MockFs()
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs, 'adl': adls}, 'hdfs')
    proxy_fs.setuser(user)

    f = proxy_fs._get_fs

    eq_(f('s3a://bucket'), s3fs)
    eq_(f('S3A://bucket/key'), s3fs)
    eq_(f('adl:/path'), adls)
    eq_(f('adl://net/path'), adls)
    eq_(f('hdfs:/path'), hdfs)
    eq_(f('hdfs://net/path'), hdfs)
    eq_(f('/tmp'), hdfs)

    assert_raises(IOError, f, 'ftp://host')
Example 4
  def test_fs_permissions_regular_user(self):
    user_client = make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
    user = User.objects.get(username='******')

    s3fs, adls, hdfs = MockFs("s3_access"), MockFs("adls_access"), MockFs()
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs, 'adl': adls}, 'hdfs')
    proxy_fs.setuser(user)

    f = proxy_fs._get_fs

    remove_from_group(user.username, 'has_s3')
    remove_from_group(user.username, 'has_adls')

    # No perms by default
    assert_raises(Exception, f, 's3a://bucket')
    assert_raises(Exception, f, 'S3A://bucket/key')
    assert_raises(Exception, f, 'adl://net/key')
    assert_raises(Exception, f, 'adl:/key')
    f('hdfs://path')
    f('/tmp')

    try:
      # Add perm
      add_permission('test', 'has_s3', permname='s3_access', appname='filebrowser')
      add_permission('test', 'has_adls', permname='adls_access', appname='filebrowser')

      f('s3a://bucket')
      f('S3A://bucket/key')
      f('adl://net/key')
      f('adl:/key')
      f('hdfs://path')
      f('/tmp')
    finally:
      remove_from_group(user.username, 'has_s3')
      remove_from_group(user.username, 'has_adls')
Example 5
def test_multi_fs_selection():
  try:
    from mock import MagicMock
  except ImportError:
    raise SkipTest("Skips until HUE-2947 is resolved")

  make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
  user = User.objects.get(username='******')
  add_permission('test', 'has_s3', permname='s3_access', appname='filebrowser')

  s3fs, hdfs = MagicMock(), MagicMock()
  proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')
  proxy_fs.setuser(user)

  proxy_fs.copy('s3a://bucket1/key', 's3a://bucket2/key')
  s3fs.copy.assert_called_once_with('s3a://bucket1/key', 's3a://bucket2/key')
  assert_false(hdfs.copy.called)

  proxy_fs.copyfile('s3a://bucket/key', 'key2')
  s3fs.copyfile.assert_called_once_with('s3a://bucket/key', 'key2')
  assert_false(hdfs.copyfile.called)

  proxy_fs.rename('/tmp/file', 'shmile')
  hdfs.rename.assert_called_once_with('/tmp/file', 'shmile')
  assert_false(s3fs.rename.called)

  # Will be addressed in HUE-2934
  assert_raises(NotImplementedError, proxy_fs.copy_remote_dir, 's3a://bucket/key', '/tmp/dir')
Example 6
def test_fs_selection():
  try:
    from mock import MagicMock
  except ImportError:
    raise SkipTest("Skips until HUE-2947 is resolved")

  make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
  user = User.objects.get(username='******')
  add_permission('test', 'has_s3', permname='s3_access', appname='filebrowser')

  s3fs, hdfs = MagicMock(), MagicMock()
  proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')
  proxy_fs.setuser(user)

  proxy_fs.isdir('s3a://bucket/key')
  s3fs.isdir.assert_called_once_with('s3a://bucket/key')
  assert_false(hdfs.isdir.called)

  proxy_fs.isfile('hdfs://localhost:42/user/alice/file')
  hdfs.isfile.assert_called_once_with('hdfs://localhost:42/user/alice/file')
  assert_false(s3fs.isfile.called)

  proxy_fs.open('/user/alice/file')
  hdfs.open.assert_called_once_with('/user/alice/file')
  assert_false(s3fs.open.called)

  assert_raises(IOError, proxy_fs.stats, 'ftp://host')
  assert_raises(IOError, proxy_fs.stats, 's3//bucket/key')
Example 7
  def test_fs_permissions_admin_user(self):
    user_client = make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=True)
    user = User.objects.get(username='******')

    proxy_fs = ProxyFS({'s3a': MockFs(), 'hdfs': MockFs()}, 'hdfs')
    proxy_fs.setuser(user)

    f = proxy_fs._get_fs

    f('s3a://bucket')
    f('S3A://bucket/key')
    f('hdfs://path')
    f('/tmp')
Example 8
  def test_fs_permissions_admin_user(self):
    user_client = make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=True)
    user = User.objects.get(username='******')

    s3fs, adls, hdfs = MockFs("s3_access"), MockFs("adls_access"), MockFs()
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs, 'adl': adls}, 'hdfs')
    proxy_fs.setuser(user)

    f = proxy_fs._get_fs

    f('s3a://bucket')
    f('S3A://bucket/key')
    f('adl://net/key')
    f('adl:/key')
    f('hdfs://path')
    f('/tmp')
Example 9
def test__get_fs_pair():
  make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
  user = User.objects.get(username='******')
  add_permission('test', 'has_s3', permname='s3_access', appname='filebrowser')

  s3fs, hdfs = MockFs(), MockFs()
  proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')
  proxy_fs.setuser(user)

  f = proxy_fs._get_fs_pair

  eq_(f('s3a://bucket1/key', 's3a://bucket2/key'), (s3fs, s3fs))
  eq_(f('s3a://bucket/key', 'key2'), (s3fs, s3fs))
  eq_(f('/tmp/file', 'shmile'), (hdfs, hdfs))

  assert_raises(IOError, f, 'ftp://host', 'key2')
  assert_raises(IOError, f, 's3//bucket/key', 'hdfs://normal/path')
Example 10
def test__get_fs_pair():
    s3fs, hdfs = 'fake_s3', 'fake_hdfs'
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')
    f = proxy_fs._get_fs_pair

    eq_(f('s3a://bucket1/key', 's3a://bucket2/key'), (s3fs, s3fs))
    eq_(f('s3a://bucket/key', 'key2'), (s3fs, s3fs))
    eq_(f('/tmp/file', 'shmile'), (hdfs, hdfs))

    assert_raises(IOError, f, 'ftp://host', 'key2')
    assert_raises(IOError, f, 's3//bucket/key', 'hdfs://normal/path')
Example 11
def test__get_fs():
    s3fs, hdfs = 'fake_s3', 'fake_hdfs'
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')
    f = proxy_fs._get_fs

    eq_(f('s3a://bucket'), s3fs)
    eq_(f('S3A://bucket/key'), s3fs)
    eq_(f('hdfs://path'), hdfs)
    eq_(f('/tmp'), hdfs)

    assert_raises(IOError, f, 'ftp://host')
    assert_raises(IOError, f, 's3//bucket/key')
Example 12
def test__get_fs():
  make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
  user = User.objects.get(username='******')
  add_permission('test', 'has_s3', permname='s3_access', appname='filebrowser')
  add_permission('test', 'has_adls', permname='adls_access', appname='filebrowser')

  s3fs, adls, hdfs = MockFs("s3_access"), MockFs("adls_access"), MockFs()
  proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs, 'adl': adls}, 'hdfs')
  proxy_fs.setuser(user)

  f = proxy_fs._get_fs

  eq_(f('s3a://bucket'), s3fs)
  eq_(f('S3A://bucket/key'), s3fs)
  eq_(f('adl:/path'), adls)
  eq_(f('adl://net/path'), adls)
  eq_(f('hdfs:/path'), hdfs)
  eq_(f('hdfs://net/path'), hdfs)
  eq_(f('/tmp'), hdfs)

  assert_raises(IOError, f, 'ftp://host')
Example 13
def _make_fs(name):
    fs_dict = {}
    for schema, getter in FS_GETTERS.iteritems():
        try:
            fs = getter(name)
            fs_dict[schema] = fs
        except KeyError:
            if DEFAULT_SCHEMA == schema:
                logging.error(
                    'Can not get filesystem called "%s" for default schema "%s"'
                    % (name, schema))
                exc_class, exc, tb = sys.exc_info()
                raise exc_class, exc, tb
            else:
                logging.warn(
                    'Can not get filesystem called "%s" for "%s" schema' %
                    (name, schema))
    return ProxyFS(fs_dict, DEFAULT_SCHEMA)
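The _get_fs assertions above resolve a path to a registered filesystem by its URI scheme, fall back to the default scheme for plain paths such as '/tmp', and raise IOError for unknown or malformed schemes. A standalone sketch of that dispatch follows; it is written to satisfy the assertions in these examples, not copied from Hue's implementation (the pair variant in Examples 9-10 additionally reuses the source filesystem when the destination is a bare relative path).

import errno


def _scheme_of(path):
    """Return the lower-cased URI scheme, or None for plain absolute paths."""
    if '://' in path:
        return path.split('://', 1)[0].lower()   # 's3a://bucket'  -> 's3a'
    if ':/' in path:
        return path.split(':/', 1)[0].lower()    # 'adl:/path'     -> 'adl'
    if path.startswith('/'):
        return None                              # '/tmp'          -> default scheme
    raise IOError(errno.EINVAL, 'Cannot infer a scheme from %r' % path)


def get_fs(fs_dict, default_scheme, path):
    """Pick the filesystem registered for the path's scheme, e.g. fs_dict['s3a']."""
    scheme = _scheme_of(path) or default_scheme
    try:
        return fs_dict[scheme]
    except KeyError:
        # e.g. 'ftp://host': the scheme parses but nothing is registered for it
        raise IOError(errno.EINVAL, 'No filesystem configured for %r' % path)


# get_fs({'s3a': 's3', 'hdfs': 'h'}, 'hdfs', 'S3A://bucket/key') returns 's3',
# while 'ftp://host' and the malformed 's3//bucket/key' both raise IOError.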
Example 14
def test_multi_fs_selection():
    try:
        from mock import MagicMock
    except ImportError:
        raise SkipTest("Skips until HUE-2947 is resolved")
    s3fs, hdfs = MagicMock(), MagicMock()
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')

    proxy_fs.copy('s3a://bucket1/key', 's3a://bucket2/key')
    s3fs.copy.assert_called_once_with('s3a://bucket1/key', 's3a://bucket2/key')
    assert_false(hdfs.copy.called)

    proxy_fs.copyfile('s3a://bucket/key', 'key2')
    s3fs.copyfile.assert_called_once_with('s3a://bucket/key', 'key2')
    assert_false(hdfs.copyfile.called)

    proxy_fs.rename('/tmp/file', 'shmile')
    hdfs.rename.assert_called_once_with('/tmp/file', 'shmile')
    assert_false(s3fs.rename.called)

    # Will be addressed in HUE-2934
    assert_raises(NotImplementedError, proxy_fs.copy_remote_dir,
                  's3a://bucket/key', '/tmp/dir')
Example 15
def test_fs_selection():
    try:
        from mock import MagicMock
    except ImportError:
        raise SkipTest("Skips until HUE-2947 is resolved")

    s3fs, hdfs = MagicMock(), MagicMock()
    proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')

    proxy_fs.isdir('s3a://bucket/key')
    s3fs.isdir.assert_called_once_with('s3a://bucket/key')
    assert_false(hdfs.isdir.called)

    proxy_fs.isfile('hdfs://localhost:42/user/alice/file')
    hdfs.isfile.assert_called_once_with('hdfs://localhost:42/user/alice/file')
    assert_false(s3fs.isfile.called)

    proxy_fs.open('/user/alice/file')
    hdfs.open.assert_called_once_with('/user/alice/file')
    assert_false(s3fs.open.called)

    assert_raises(IOError, proxy_fs.stats, 'ftp://host')
    assert_raises(IOError, proxy_fs.stats, 's3//bucket/key')
Example 16
def test_multi_fs_selection():
  try:
    from mock import MagicMock
  except ImportError:
    raise SkipTest("Skips until HUE-2947 is resolved")
  s3fs, hdfs = MagicMock(), MagicMock()
  proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')

  proxy_fs.copy('s3a://bucket1/key', 's3a://bucket2/key')
  s3fs.copy.assert_called_once_with('s3a://bucket1/key', 's3a://bucket2/key')
  assert_false(hdfs.copy.called)

  proxy_fs.copyfile('s3a://bucket/key', 'key2')
  s3fs.copyfile.assert_called_once_with('s3a://bucket/key', 'key2')
  assert_false(hdfs.copyfile.called)

  proxy_fs.rename('/tmp/file', 'shmile')
  hdfs.rename.assert_called_once_with('/tmp/file', 'shmile')
  assert_false(s3fs.rename.called)

  # Will be addressed in HUE-2934
  assert_raises(NotImplementedError, proxy_fs.copy_remote_dir, 's3a://bucket/key', '/tmp/dir')
Example 17
def test_fs_selection():
  try:
    from mock import MagicMock
  except ImportError:
    raise SkipTest("Skips until HUE-2947 is resolved")

  s3fs, hdfs = MagicMock(), MagicMock()
  proxy_fs = ProxyFS({'s3a': s3fs, 'hdfs': hdfs}, 'hdfs')

  proxy_fs.isdir('s3a://bucket/key')
  s3fs.isdir.assert_called_once_with('s3a://bucket/key')
  assert_false(hdfs.isdir.called)

  proxy_fs.isfile('hdfs://localhost:42/user/alice/file')
  hdfs.isfile.assert_called_once_with('hdfs://localhost:42/user/alice/file')
  assert_false(s3fs.isfile.called)

  proxy_fs.open('/user/alice/file')
  hdfs.open.assert_called_once_with('/user/alice/file')
  assert_false(s3fs.open.called)

  assert_raises(IOError, proxy_fs.stats, 'ftp://host')
  assert_raises(IOError, proxy_fs.stats, 's3//bucket/key')
Example 18
            else:
                raise Exception('Filesystem not configured for %s' % schema)
        except KeyError:
            if DEFAULT_SCHEMA == schema:
                logging.error(
                    'Can not get filesystem called "%s" for default schema "%s"'
                    % (name, schema))
                exc_class, exc, tb = sys.exc_info()
                raise exc_class, exc, tb
            else:
                logging.warn(
                    'Can not get filesystem called "%s" for "%s" schema' %
                    (name, schema))
        except Exception, e:
            logging.warn(e)
    return ProxyFS(fs_dict, DEFAULT_SCHEMA)


def clear_cache():
    """
  Clears internal cache.  Returns
  something that can be given back to restore_cache.
  """
    global FS_CACHE
    old = FS_CACHE
    FS_CACHE = {}
    return old


def restore_cache(old_cache):
    """
Example 19
def test_multi_fs_selection():
  make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
  user = User.objects.get(username='******')

  with patch('desktop.lib.fs.ProxyFS._has_access') as _has_access:
    _has_access.return_value = True

    s3fs, adls, hdfs, abfs, gs = MagicMock(), MagicMock(), MagicMock(), MagicMock(), MagicMock()
    proxy_fs = ProxyFS({'s3a': wrapper(s3fs), 'hdfs': wrapper(hdfs), 'adl': wrapper(adls), 'abfs': wrapper(abfs), 'gs': wrapper(gs)}, 'hdfs')
    proxy_fs.setuser(user)

    proxy_fs.copy('s3a://bucket1/key', 's3a://bucket2/key')
    s3fs.copy.assert_called_once_with('s3a://bucket1/key', 's3a://bucket2/key')
    assert_false(hdfs.copy.called)

    proxy_fs.copyfile('s3a://bucket/key', 'key2')
    s3fs.copyfile.assert_called_once_with('s3a://bucket/key', 'key2')
    assert_false(hdfs.copyfile.called)

    proxy_fs.copyfile('adl://net/key', 'key2')
    adls.copyfile.assert_called_once_with('adl://net/key', 'key2')
    assert_false(hdfs.copyfile.called)

    proxy_fs.copyfile('abfs:/key', 'key2')
    abfs.copyfile.assert_called_once_with('abfs:/key', 'key2')
    assert_false(hdfs.copyfile.called)

    proxy_fs.rename('/tmp/file', 'shmile')
    hdfs.rename.assert_called_once_with('/tmp/file', 'shmile')
    assert_false(s3fs.rename.called)

    proxy_fs.copyfile('gs://bucket/key', 'key2')
    gs.copyfile.assert_called_once_with('gs://bucket/key', 'key2')
    assert_false(hdfs.copyfile.called)

    # Will be addressed in HUE-2934. The exception can only be thrown when the destination
    # scheme is specified; otherwise the call defaults to the first path's scheme.
    assert_raises(NotImplementedError, proxy_fs.copy_remote_dir, 's3a://bucket/key', 'adl://tmp/dir')
Example 20
def test_fs_selection():
  make_logged_in_client(username='******', groupname='default', recreate=True, is_superuser=False)
  user = User.objects.get(username='******')
  with patch('desktop.lib.fs.ProxyFS._has_access') as _has_access:
    _has_access.return_value = True

    s3fs, adls, hdfs, abfs, gs = MagicMock(), MagicMock(), MagicMock(), MagicMock(), MagicMock()
    proxy_fs = ProxyFS({'s3a': wrapper(s3fs), 'hdfs': wrapper(hdfs), 'adl': wrapper(adls), 'abfs': wrapper(abfs), 'gs': wrapper(gs)}, 'hdfs')
    proxy_fs.setuser(user)

    proxy_fs.isdir('s3a://bucket/key')
    s3fs.isdir.assert_called_once_with('s3a://bucket/key')
    assert_false(hdfs.isdir.called)

    proxy_fs.isfile('hdfs://localhost:42/user/alice/file')
    hdfs.isfile.assert_called_once_with('hdfs://localhost:42/user/alice/file')
    assert_false(s3fs.isfile.called)

    proxy_fs.isdir('adl://net/key')
    adls.isdir.assert_called_once_with('adl://net/key')
    assert_false(hdfs.isdir.called)

    proxy_fs.isdir('abfs://net/key')
    abfs.isdir.assert_called_once_with('abfs://net/key')
    assert_false(hdfs.isdir.called)

    proxy_fs.isdir('gs://net/key')
    gs.isdir.assert_called_once_with('gs://net/key')
    assert_false(hdfs.isdir.called)

    assert_raises(IOError, proxy_fs.stats, 'ftp://host')