def test_from_bucket():
    """A FakeBucket named 'boo' should stat as a DIRECTORY at s3://boo with zero size and no atime."""
    stat = S3Stat.from_bucket(FakeBucket('boo'))
    # (expected, actual) pairs checked in the same order as the original assertions.
    checks = [
        ('DIRECTORY', stat.type),
        ('boo', stat.name),
        ('s3://boo', stat.path),
        (0, stat.size),
        (None, stat.atime),
    ]
    for expected, actual in checks:
        eq_(expected, actual)
def listdir_stats(self, path, glob=None):
    """Return S3Stat entries for the children of `path`.

    For the S3 root, lists all buckets (sorted by name); otherwise lists
    the keys and common prefixes directly under the path's prefix.

    Raises NotImplementedError if `glob` is given, S3ListAllBucketsException
    when bucket listing is forbidden (403), and S3FileSystemException on
    other listing failures.
    """
    if glob is not None:
        raise NotImplementedError(_("Option `glob` is not implemented"))

    if S3FileSystem.isroot(path):
        try:
            buckets = self._s3_connection.get_all_buckets(headers=self.header_values)
            return sorted([S3Stat.from_bucket(b, self.fs) for b in buckets], key=lambda stat: stat.name)
        except S3FileSystemException:
            raise
        except S3ResponseError as e:
            # A 403 (or 'Forbidden') means the credentials cannot enumerate buckets at all.
            forbidden = 'Forbidden' in str(e) or (hasattr(e, 'status') and e.status == 403)
            if forbidden:
                raise S3ListAllBucketsException(
                    _('You do not have permissions to list all buckets. Please specify a bucket name you have access to.'))
            raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
        except Exception as e:
            raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e)

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)

    listing = []
    for entry in bucket.list(prefix=prefix, delimiter='/', headers=self.header_values):
        if isinstance(entry, Prefix):
            # Common prefixes are surfaced as directories.
            listing.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True, fs=self.fs))
        elif entry.name != prefix:
            # Skip the placeholder key for the directory itself.
            listing.append(self._stats_key(entry, self.fs))
    return listing
def listdir_stats(self, path, glob=None):
    """Return S3Stat entries for the children of `path`.

    For the S3 root, lists all buckets (sorted by name); otherwise lists
    the keys and common prefixes directly under the path's prefix.

    Raises NotImplementedError if `glob` is given and S3FileSystemException
    when bucket listing fails.
    """
    if glob is not None:
        raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
        try:
            buckets = self._s3_connection.get_all_buckets()
            return sorted([S3Stat.from_bucket(b) for b in buckets], key=lambda stat: stat.name)
        except S3FileSystemException:
            raise
        except S3ResponseError as e:
            raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
        except Exception as e:
            raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e)

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)

    listing = []
    for entry in bucket.list(prefix=prefix, delimiter='/'):
        if isinstance(entry, Prefix):
            # Common prefixes are surfaced as directories.
            listing.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True))
        elif entry.name != prefix:
            # Skip the placeholder key for the directory itself.
            listing.append(self._stats_key(entry))
    return listing
def test_from_bucket():
    """A FakeBucket named 'boo' should stat as a DIRECTORY at s3a://boo with zero size and no atime."""
    stat = S3Stat.from_bucket(FakeBucket("boo"))
    # (expected, actual) pairs checked in the same order as the original assertions.
    checks = [
        ("DIRECTORY", stat.type),
        ("boo", stat.name),
        ("s3a://boo", stat.path),
        (0, stat.size),
        (None, stat.atime),
    ]
    for expected, actual in checks:
        eq_(expected, actual)
def listdir_stats(self, path, glob=None):
    """Return S3Stat entries for all buckets when `path` is the S3 root.

    Raises NotImplementedError if `glob` is given and S3FileSystemException
    when bucket listing fails.

    NOTE(review): unlike the sibling variants, this version handles only the
    root path and implicitly returns None for any other path — confirm that
    is intended.
    """
    if glob is not None:
        raise NotImplementedError(_("Option `glob` is not implemented"))
    if s3.is_root(path):
        try:
            # Bucket entries are presented as directories, sorted by name.
            return sorted([S3Stat.from_bucket(b) for b in self._s3_connection.get_all_buckets()], key=lambda x: x.name)
        # Fixed: original used Python 2-only `except Type, e:` syntax, which is a
        # SyntaxError on Python 3 and inconsistent with the other variants in this file.
        except S3FileSystemException as e:
            raise e
        except S3ResponseError as e:
            raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
def listdir_stats(self, path, glob=None):
    """Return S3Stat entries for the children of `path`.

    For the S3 root, lists the cached buckets (sorted by name); otherwise
    lists the keys and common prefixes directly under the path's prefix.

    Raises NotImplementedError if `glob` is given.
    """
    if glob is not None:
        raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
        self._init_bucket_cache()
        bucket_stats = [S3Stat.from_bucket(b) for b in self._bucket_cache.values()]
        return sorted(bucket_stats, key=lambda stat: stat.name)

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)

    listing = []
    for entry in bucket.list(prefix=prefix, delimiter='/'):
        if isinstance(entry, Prefix):
            # Common prefixes are surfaced as directories.
            listing.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True))
        elif entry.name != prefix:
            # Skip the placeholder key for the directory itself.
            listing.append(self._stats_key(entry))
    return listing
def listdir_stats(self, path, glob=None):
    """Return S3Stat entries for the children of `path`.

    For the S3 root, lists the cached buckets (in cache order, unsorted);
    otherwise lists the keys and common prefixes directly under the path's
    prefix.

    Raises NotImplementedError if `glob` is given.
    """
    if glob is not None:
        raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
        self._init_bucket_cache()
        return [S3Stat.from_bucket(b) for b in self._bucket_cache.values()]

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)

    listing = []
    for entry in bucket.list(prefix=prefix, delimiter='/'):
        if isinstance(entry, Prefix):
            # Common prefixes are surfaced as directories.
            listing.append(S3Stat.from_key(Key(entry.bucket, entry.name), is_dir=True))
        elif entry.name != prefix:
            # Skip the placeholder key for the directory itself.
            listing.append(self._stats_key(entry))
    return listing