Exemplo n.º 1
0
    def listdir_stats(self, path, glob=None):
        """Return S3Stat entries for the direct children of `path`.

        At the S3 root this lists every bucket (sorted by name); below a
        bucket it lists the keys and common prefixes directly under `path`.

        Raises NotImplementedError when `glob` is given, and
        S3FileSystemException when the bucket listing fails.
        """
        if glob is not None:
            raise NotImplementedError(_("Option `glob` is not implemented"))

        if s3.is_root(path):
            try:
                buckets = self._s3_connection.get_all_buckets()
                return sorted((S3Stat.from_bucket(b) for b in buckets),
                              key=lambda entry: entry.name)
            except S3FileSystemException as e:
                raise e
            except S3ResponseError as e:
                raise S3FileSystemException(
                    _('Failed to retrieve buckets: %s') % e.reason)
            except Exception as e:
                raise S3FileSystemException(
                    _('Failed to retrieve buckets: %s') % e)

        bucket_name, prefix = s3.parse_uri(path)[:2]
        bucket = self._get_bucket(bucket_name)
        prefix = self._append_separator(prefix)
        stats = []
        for entry in bucket.list(prefix=prefix, delimiter='/'):
            if isinstance(entry, Prefix):
                # Common prefixes are reported as pseudo-directories.
                stats.append(
                    S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True))
            elif entry.name != prefix:
                # Skip the placeholder key representing the directory itself.
                stats.append(self._stats_key(entry))
        return stats
Exemplo n.º 2
0
  def listdir_stats(self, path, glob=None):
    """List S3Stat entries for the immediate children of `path`.

    Root paths enumerate every bucket (name-sorted); bucket paths enumerate
    keys and pseudo-directories directly below the prefix.  Requests carry
    `self.header_values` and stats are tagged with `self.fs`.
    """
    if glob is not None:
      raise NotImplementedError(_("Option `glob` is not implemented"))

    if S3FileSystem.isroot(path):
      try:
        buckets = self._s3_connection.get_all_buckets(headers=self.header_values)
        return sorted((S3Stat.from_bucket(b, self.fs) for b in buckets), key=lambda entry: entry.name)
      except S3FileSystemException as e:
        raise e
      except S3ResponseError as e:
        # A 403 on list-all-buckets gets a dedicated exception so the caller
        # can prompt the user for an explicit, accessible bucket name.
        if 'Forbidden' in str(e) or (hasattr(e, 'status') and e.status == 403):
          raise S3ListAllBucketsException(_('You do not have permissions to list all buckets. Please specify a bucket name you have access to.'))
        else:
          raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
      except Exception as e:
        raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e)

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)
    stats = []
    for entry in bucket.list(prefix=prefix, delimiter='/', headers=self.header_values):
      if isinstance(entry, Prefix):
        # A common prefix stands in for a directory.
        stats.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True, fs=self.fs))
      elif entry.name != prefix:
        stats.append(self._stats_key(entry, self.fs))
    return stats
Exemplo n.º 3
0
 def _stats_key(key, fs='s3a'):
   """Build an S3Stat for `key`, or None when nothing exists at that name.

   A key whose size is known is a real object (a trailing '/' marks it as a
   directory placeholder).  Otherwise the key itself is absent, so probe for
   any object under the prefix to detect an implicit directory.
   """
   if key.size is not None:
     looks_like_dir = not key.name or key.name.endswith('/')
     return S3Stat.from_key(key, is_dir=looks_like_dir, fs=fs)

   key.name = S3FileSystem._append_separator(key.name)
   # One child is enough to prove the directory exists.
   # Not sure possible via signed request.
   if len(key.bucket.get_all_keys(prefix=key.name, max_keys=1)) > 0:
     return S3Stat.from_key(key, is_dir=True, fs=fs)
   return None
Exemplo n.º 4
0
 def _stats_key(key):
   """Return an S3Stat for `key`, or None if the path does not exist.

   Keys with a known size are real objects; a trailing '/' in the name means
   the object is a directory marker.  Sizeless keys are checked for children
   to decide whether they are implicit directories.
   """
   if key.size is not None:
     looks_like_dir = not key.name or key.name.endswith('/')
     return S3Stat.from_key(key, is_dir=looks_like_dir)

   key.name = S3FileSystem._append_separator(key.name)
   # Fetch at most one child: any hit means the directory exists.
   if len(key.bucket.get_all_keys(prefix=key.name, max_keys=1)) > 0:
     return S3Stat.from_key(key, is_dir=True)
   return None
Exemplo n.º 5
0
 def _stats_key(key):
     """Stat `key`, returning None when no object or directory exists there.

     Objects with a size stat directly (trailing '/' implies a directory
     marker); otherwise a single-key prefix listing decides whether the name
     is an implicit directory.
     """
     if key.size is None:
         # The key itself is missing; look for anything beneath the prefix.
         key.name = S3FileSystem._append_separator(key.name)
         children = key.bucket.get_all_keys(prefix=key.name, max_keys=1)
         return S3Stat.from_key(key, is_dir=True) if children else None

     looks_like_dir = not key.name or key.name.endswith('/')
     return S3Stat.from_key(key, is_dir=looks_like_dir)
Exemplo n.º 6
0
def test_derivable_properties():
    """S3Stat derives type, mode, owner, atime and aclBit from its fields."""
    # File stat: regular-file mode bits, no owner info, atime mirrors mtime.
    s = S3Stat('foo', 's3://bar/foo', False, 40, 1424983327)
    eq_('FILE', s.type)
    # `0666` was Python 2 octal syntax (SyntaxError in Python 3); `0o666`
    # is the same value in both.
    eq_(0o666 | stat.S_IFREG, s.mode)
    eq_('', s.user)
    eq_('', s.group)
    eq_(1424983327, s.atime)
    eq_(False, s.aclBit)

    # Directory stat uses directory mode bits.
    s = S3Stat('bar', 's3://bar', True, 0, 1424983327)
    eq_('DIRECTORY', s.type)
    eq_(0o777 | stat.S_IFDIR, s.mode)
Exemplo n.º 7
0
def test_from_key():
  """S3Stat.from_key maps key attributes; missing size/mtime default to 0."""
  key = FakeKey('foo', FakeBucket('bar'), 42, 'Thu, 26 Feb 2015 20:42:07 GMT')

  file_stat = S3Stat.from_key(key)
  eq_('FILE', file_stat.type)
  eq_('foo', file_stat.name)
  eq_('s3://bar/foo', file_stat.path)
  eq_(42, file_stat.size)
  eq_(1424983327, file_stat.mtime)

  # Directory keys have neither size nor timestamp; both fall back to zero.
  key.size = None
  key.last_modified = None
  dir_stat = S3Stat.from_key(key, is_dir=True)
  eq_('DIRECTORY', dir_stat.type)
  eq_(0, dir_stat.size)
  eq_(0, dir_stat.atime)
Exemplo n.º 8
0
def test_from_key():
  """Key attributes round-trip into an S3Stat (size/mtime optional)."""
  fake_key = FakeKey('foo', FakeBucket('bar'), 42, 'Thu, 26 Feb 2015 20:42:07 GMT')
  result = S3Stat.from_key(fake_key)
  eq_('FILE', result.type)
  eq_('foo', result.name)
  eq_('s3://bar/foo', result.path)
  eq_(42, result.size)
  eq_(1424983327, result.mtime)

  # Strip size and modification time to exercise the directory defaults.
  fake_key.size = None
  fake_key.last_modified = None
  result = S3Stat.from_key(fake_key, is_dir=True)
  eq_('DIRECTORY', result.type)
  eq_(0, result.size)
  eq_(0, result.atime)
Exemplo n.º 9
0
def test_from_key():
    """from_key builds s3a-scheme stats; size defaults to 0, atime to None."""
    fake_key = FakeKey("foo", FakeBucket("bar"), 42, "Thu, 26 Feb 2015 20:42:07 GMT")
    file_stat = S3Stat.from_key(fake_key)
    eq_("FILE", file_stat.type)
    eq_("foo", file_stat.name)
    eq_("s3a://bar/foo", file_stat.path)
    eq_(42, file_stat.size)
    eq_(1424983327, file_stat.mtime)

    # Without size/mtime a directory stat gets zero size and no atime.
    fake_key.size = None
    fake_key.last_modified = None
    dir_stat = S3Stat.from_key(fake_key, is_dir=True)
    eq_("DIRECTORY", dir_stat.type)
    eq_(0, dir_stat.size)
    eq_(None, dir_stat.atime)
Exemplo n.º 10
0
def test_from_bucket():
  """A bucket stats as an empty directory with no access time."""
  bucket_stat = S3Stat.from_bucket(FakeBucket('boo'))
  eq_('DIRECTORY', bucket_stat.type)
  eq_('boo', bucket_stat.name)
  eq_('s3://boo', bucket_stat.path)
  eq_(0, bucket_stat.size)
  eq_(None, bucket_stat.atime)
Exemplo n.º 11
0
def test_for_s3_root():
    """The synthetic S3 root is a zero-size directory named 'S3'."""
    root = S3Stat.for_s3_root()
    eq_('DIRECTORY', root.type)
    eq_('S3', root.name)
    eq_('s3://', root.path)
    eq_(0, root.size)
    eq_(None, root.atime)
Exemplo n.º 12
0
def test_from_bucket():
    """Buckets map to s3a-scheme directory stats with no size or atime."""
    bucket_stat = S3Stat.from_bucket(FakeBucket("boo"))
    eq_("DIRECTORY", bucket_stat.type)
    eq_("boo", bucket_stat.name)
    eq_("s3a://boo", bucket_stat.path)
    eq_(0, bucket_stat.size)
    eq_(None, bucket_stat.atime)
Exemplo n.º 13
0
def test_for_s3_root():
    """The s3a root stat is a directory named 'S3A' at 's3a://'."""
    root = S3Stat.for_s3_root()
    eq_("DIRECTORY", root.type)
    eq_("S3A", root.name)
    eq_("s3a://", root.path)
    eq_(0, root.size)
    eq_(None, root.atime)
Exemplo n.º 14
0
def test_from_bucket():
    """from_bucket yields a directory stat carrying the bucket name."""
    result = S3Stat.from_bucket(FakeBucket('boo'))
    eq_('DIRECTORY', result.type)
    eq_('boo', result.name)
    eq_('s3://boo', result.path)
    eq_(0, result.size)
    eq_(None, result.atime)
Exemplo n.º 15
0
def test_for_s3_root():
  """for_s3_root returns the fixed 'S3' directory stat."""
  root_stat = S3Stat.for_s3_root()
  eq_('DIRECTORY', root_stat.type)
  eq_('S3', root_stat.name)
  eq_('s3://', root_stat.path)
  eq_(0, root_stat.size)
  eq_(None, root_stat.atime)
Exemplo n.º 16
0
  def _stats(self, path):
    """Stat `path`, returning the synthetic root stat at the S3 root.

    Raises S3FileSystemException when boto reports a client-side error.
    (NOTE: snippet appears truncated after the except clause.)
    """
    if s3.is_root(path):
      return S3Stat.for_s3_root()

    try:
      key = self._get_key(path, validate=True)
    # `except BotoClientError, e:` was Python 2 syntax (SyntaxError in
    # Python 3); `as e` is valid in both.
    except BotoClientError as e:
      raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))
Exemplo n.º 17
0
  def listdir_stats(self, path, glob=None):
    """Stat the direct children of `path`: buckets at root, keys below.

    Raises NotImplementedError when `glob` is supplied.
    """
    if glob is not None:
      raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
      # The root listing is served from the (lazily initialised) bucket cache.
      self._init_bucket_cache()
      return [S3Stat.from_bucket(bucket) for bucket in self._bucket_cache.values()]

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)
    stats = []
    for entry in bucket.list(prefix=prefix, delimiter='/'):
      if isinstance(entry, Prefix):
        # Common prefixes represent pseudo-directories.
        stats.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True))
      elif entry.name != prefix:
        # The key equal to the prefix is the directory placeholder itself.
        stats.append(self._stats_key(entry))
    return stats
Exemplo n.º 18
0
  def listdir_stats(self, path, glob=None):
    """List child stats under `path`; root listings are sorted by name."""
    if glob is not None:
      raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
      # Root: serve name-sorted bucket stats from the bucket cache.
      self._init_bucket_cache()
      bucket_stats = [S3Stat.from_bucket(b) for b in self._bucket_cache.values()]
      return sorted(bucket_stats, key=lambda entry: entry.name)

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)
    stats = []
    for entry in bucket.list(prefix=prefix, delimiter='/'):
      if isinstance(entry, Prefix):
        # A common prefix is surfaced as a directory.
        stats.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True))
      elif entry.name != prefix:
        stats.append(self._stats_key(entry))
    return stats
Exemplo n.º 19
0
  def listdir_stats(self, path, glob=None):
    """List child stats; at the root, all buckets sorted by name.

    Raises S3FileSystemException when the bucket listing fails.
    (NOTE: snippet appears truncated after the except clauses.)
    """
    if glob is not None:
      raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
      try:
        return sorted([S3Stat.from_bucket(b) for b in self._s3_connection.get_all_buckets()], key=lambda x: x.name)
      # `except X, e:` was Python 2 syntax (SyntaxError in Python 3);
      # `as e` is the portable form.
      except S3FileSystemException as e:
        raise e
      except S3ResponseError as e:
        raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
Exemplo n.º 20
0
    def _stats(self, path):
        """Stat `path`; return None when the key does not exist (404).

        The synthetic root stat is returned for the S3 root.  Any non-404
        S3ResponseError propagates unchanged.
        """
        if s3.is_root(path):
            return S3Stat.for_s3_root()

        try:
            key = self._get_key(path, validate=True)
        except S3ResponseError as e:
            if e.status == 404:
                return None
            else:
                # `raise exc_class, exc, tb` was Python 2 syntax (SyntaxError
                # in Python 3).  A bare raise inside the handler re-raises the
                # active exception with its original traceback, making the
                # sys.exc_info() bookkeeping unnecessary.
                raise

        if key is None:
            key = self._get_key(path, validate=False)
        return self._stats_key(key)
Exemplo n.º 21
0
  def _stats(self, path):
    """Stat `path`; None for a missing key, root stat for the S3 root.

    Non-404 S3ResponseErrors are re-raised unchanged.
    """
    if s3.is_root(path):
      return S3Stat.for_s3_root()

    try:
      key = self._get_key(path, validate=True)
    except S3ResponseError as e:
      if e.status == 404:
        return None
      else:
        # Replaces Python 2's `raise exc_class, exc, tb` (a SyntaxError in
        # Python 3): bare raise already re-raises with the full traceback.
        raise

    if key is None:
      key = self._get_key(path, validate=False)
    return self._stats_key(key)
Exemplo n.º 22
0
  def _stats(self, path):
    """Stat `path`: None for 404, S3FileSystemException for 403/other errors."""
    if s3.is_root(path):
      return S3Stat.for_s3_root()

    try:
      key = self._get_key(path, validate=True)
    except S3ResponseError as e:
      if e.status == 404:
        # Missing object: report "no such path" instead of raising.
        return None
      if e.status == 403:
        raise S3FileSystemException(_('User is not authorized to access path: "%s"') % path)
      raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))

    if key is None:
      key = self._get_key(path, validate=False)
    return self._stats_key(key)
Exemplo n.º 23
0
  def _stats(self, path):
    """Return an S3Stat for `path`, or None when the key does not exist."""
    if s3.is_root(path):
      return S3Stat.for_s3_root()

    try:
      s3_key = self._get_key(path, validate=True)
    except S3ResponseError as e:
      if e.status == 404:
        return None
      elif e.status == 403:
        raise S3FileSystemException(_('User is not authorized to access path: "%s"') % path)
      else:
        raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))

    if s3_key is None:
      # Validation can come back empty (e.g. implicit directory); retry
      # without validation to build a stat from the bare name.
      s3_key = self._get_key(path, validate=False)
    return self._stats_key(s3_key)
Exemplo n.º 24
0
  def _stats(self, path):
    """Stat `path`, translating boto failures into S3FileSystemException.

    Returns None on 404; the synthetic root stat at the S3 root.
    """
    if s3.is_root(path):
      return S3Stat.for_s3_root()

    try:
      key = self._get_key(path, validate=True)
    except BotoClientError as e:
      raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))
    except S3ResponseError as e:
      if e.status == 404:
        return None
      if e.status == 403:
        raise S3FileSystemException(_('User is not authorized to access path: "%s"') % path)
      raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))
    except Exception as e:
      # SSL errors show up here, because they've been remapped in boto.
      raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, str(e)))
    if key is None:
      key = self._get_key(path, validate=False)
    return self._stats_key(key, self.fs)
Exemplo n.º 25
0
                raise e
            except S3ResponseError, e:
                raise S3FileSystemException(
                    _('Failed to retrieve buckets: %s') % e.reason)
            except Exception, e:
                raise S3FileSystemException(
                    _('Failed to retrieve buckets: %s') % e)

        bucket_name, prefix = s3.parse_uri(path)[:2]
        bucket = self._get_bucket(bucket_name)
        prefix = self._append_separator(prefix)
        res = []
        for item in bucket.list(prefix=prefix, delimiter='/'):
            if isinstance(item, Prefix):
                res.append(
                    S3Stat.from_key(Key(item.bucket, item.name), is_dir=True))
            else:
                if item.name == prefix:
                    continue
                res.append(self._stats_key(item))
        return res

    @translate_s3_error
    def listdir(self, path, glob=None):
        """Return just the child names (third parse_uri component) under `path`."""
        stats = self.listdir_stats(path, glob)
        return [s3.parse_uri(entry.path)[2] for entry in stats]

    @translate_s3_error
    @auth_error_handler
    def rmtree(self, path, skipTrash=True):