def __init__(self, data):
     """Initialize dynamic-bucket storage and validate its configuration.

     The bucket name template must contain at least one date placeholder,
     and a bucket secret must be available either from the storage data or
     from the botocore scoped config (``indico_bucket_secret``).
     """
     super(DynamicS3Storage, self).__init__(data)
     self.bucket_name_template = self.parsed_data['bucket_template']
     secret = self.parsed_data.get('bucket_secret', '')
     if not secret:
         secret = self.session._session.get_scoped_config().get('indico_bucket_secret', '')
     self.bucket_secret = secret
     placeholders = ('<year>', '<month>', '<week>')
     if not any(placeholder in self.bucket_name_template for placeholder in placeholders):
         raise StorageError('At least one date placeholder is required when using dynamic bucket names')
     if not self.bucket_secret:
         raise StorageError('A bucket secret is required when using dynamic bucket names')
     # Resolve the template for today's date to check the effective length;
     # capping it at 46 chars leaves room for at least 16 hash chars.
     resolved_name = self._replace_bucket_placeholders(self.bucket_name_template, date.today())
     if len(resolved_name) > 46:
         raise StorageError('Bucket name cannot be longer than 46 chars (to keep at least 16 hash chars)')
Example #2
0
 def save(self, name, content_type, filename, fileobj):
     """Save *fileobj* under *name* on the XRootD filesystem.

     Returns whatever ``_save`` returns; any failure is wrapped in a
     :exc:`StorageError` chained to the original exception.
     """
     try:
         fs = self._get_xrootd_fs()
         return self._save(name, fileobj, fs)
     except Exception as exc:
         # Python-3 exception chaining; the old ``raise E, None, tb`` form
         # is a SyntaxError on Python 3 and the traceback is preserved
         # automatically via ``__cause__``.
         raise StorageError(f'Could not save "{name}": {exc}') from exc
Example #3
0
 def open(self, file_id):
     """Return the contents of *file_id* as an in-memory binary stream."""
     bucket, key = self._parse_file_id(file_id)
     try:
         body = self.client.get_object(Bucket=bucket, Key=key)['Body']
         payload = body.read()
     except Exception as exc:
         raise StorageError(f'Could not open "{file_id}": {exc}') from exc
     return BytesIO(payload)
 def save(self, name, content_type, filename, fileobj):
     """Store *fileobj* in the current (date-derived) bucket.

     Returns a ``(name, checksum)`` tuple; any failure is wrapped in a
     :exc:`StorageError` chained to the original exception.
     """
     try:
         bucket = self._get_current_bucket_name()
         checksum = self._save(bucket, name, content_type, fileobj)
         return name, checksum
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise,
         # matching the Python-3 variant of this method elsewhere in the file.
         raise StorageError(f'Could not save "{name}": {exc}') from exc
Example #5
0
 def save(self, name, content_type, filename, fileobj):
     """Store *fileobj* in the bucket for the current date.

     Returns ``(name, checksum)``; failures are wrapped in :exc:`StorageError`.
     """
     try:
         current_bucket = self._get_current_bucket_name()
         digest = self._save(current_bucket, name, content_type, fileobj)
     except Exception as exc:
         raise StorageError(f'Could not save "{name}": {exc}') from exc
     return name, digest
Example #6
0
    def send_file(self, file_id, content_type, filename, inline=True):
        """Send the stored file to the client.

        In local proxy mode the file is streamed directly; otherwise the
        client is redirected to a short-lived presigned S3 URL, optionally
        proxied through nginx via an ``X-Accel-Redirect`` header.
        """
        if self.proxy_downloads == ProxyDownloadsMode.local:
            return send_file(filename, self.open(file_id), content_type, inline=inline)

        try:
            bucket, key = self._parse_file_id(file_id)
            headers = Headers()
            headers.add('Content-Disposition',
                        'inline' if inline else 'attachment',
                        **make_content_disposition_args(filename))
            presigned_url = self.client.generate_presigned_url(
                'get_object',
                Params={'Bucket': bucket,
                        'Key': key,
                        'ResponseContentDisposition': headers.get('Content-Disposition'),
                        'ResponseContentType': content_type},
                ExpiresIn=120)
            response = redirect(presigned_url)
            if self.proxy_downloads == ProxyDownloadsMode.nginx:
                # nginx can proxy the request to S3 to avoid exposing the redirect and
                # bucket URL to the end user (since it is quite ugly and temporary)
                response.headers['X-Accel-Redirect'] = '/.xsf/s3/' + presigned_url.replace('://', '/', 1)
            return response
        except Exception as exc:
            raise StorageError(
                f'Could not send file "{file_id}": {exc}') from exc
Example #7
0
    def send_file(self, file_id, content_type, filename, inline=True):
        """Send the stored file to the client.

        When ``proxy_downloads`` is enabled the file is streamed directly;
        otherwise the client is redirected to a short-lived presigned S3 URL.
        """
        if self.proxy_downloads:
            return send_file(filename,
                             self.open(file_id),
                             content_type,
                             inline=inline)

        try:
            bucket, id_ = self._parse_file_id(file_id)
            content_disp = 'inline' if inline else 'attachment'
            h = Headers()
            h.add('Content-Disposition', content_disp, filename=filename)
            url = self.client.generate_presigned_url(
                'get_object',
                Params={
                    'Bucket': bucket,
                    'Key': id_,
                    'ResponseContentDisposition': h.get('Content-Disposition'),
                    'ResponseContentType': content_type
                },
                ExpiresIn=120)
            return redirect(url)
        except Exception as exc:
            # Python-3 chaining; the old ``raise E, None, tb`` form is a
            # SyntaxError on Python 3.
            raise StorageError(f'Could not send file "{file_id}": {exc}') from exc
Example #8
0
 def delete(self, file_id):
     """Delete the object identified by *file_id* from its bucket.

     Raises :exc:`StorageError` (chained) if deletion fails.
     """
     bucket, id_ = self._parse_file_id(file_id)
     try:
         # boto3 client methods accept keyword arguments only; the previous
         # positional call would raise a TypeError at runtime (cf. the
         # keyword form used by the other ``delete`` in this file).
         self.client.delete_object(Bucket=bucket, Key=id_)
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not delete "{file_id}": {exc}') from exc
Example #9
0
 def open(self, file_id):
     """Open the stored file for binary reading on the XRootD filesystem.

     Raises :exc:`StorageError` (chained) if the file cannot be opened.
     """
     try:
         return self._get_xrootd_fs().open(self._resolve_path(file_id),
                                           'rb')
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not open "{file_id}": {exc}') from exc
Example #10
0
 def open(self, file_id):
     """Return the contents of *file_id* as a :class:`BytesIO` stream.

     Raises :exc:`StorageError` (chained) if the object cannot be fetched.
     """
     bucket, id_ = self._parse_file_id(file_id)
     try:
         s3_object = self.client.get_object(Bucket=bucket, Key=id_)['Body']
         return BytesIO(s3_object.read())
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not open "{file_id}": {exc}') from exc
Example #11
0
 def getsize(self, file_id):
     """Return the size in bytes of the stored object, via a HEAD request."""
     bucket, key = self._parse_file_id(file_id)
     try:
         size = self.client.head_object(Bucket=bucket, Key=key)['ContentLength']
     except Exception as exc:
         raise StorageError(
             f'Could not get size of "{file_id}": {exc}') from exc
     return size
Example #12
0
 def getsize(self, file_id):
     """Return the size in bytes of the stored object (HEAD request).

     Raises :exc:`StorageError` (chained) on failure.
     """
     bucket, id_ = self._parse_file_id(file_id)
     try:
         return self.client.head_object(Bucket=bucket,
                                        Key=id_)['ContentLength']
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not get size of "{file_id}": {exc}') from exc
Example #13
0
 def getsize(self, file_id):
     """Return the size in bytes of *file_id* via an XRootD stat call.

     Raises :exc:`StorageError` (chained) on any failure, including a
     non-OK stat status.
     """
     try:
         fs = self._get_xrootd_fs()
         path = self._resolve_path(file_id)
         fullpath = fs._p(path)
         status, stat = fs._client.stat(fullpath)
         if not status.ok:
             fs._raise_status(path, status)
         return stat.size
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not get size of "{file_id}": {exc}') from exc
Example #14
0
 def save(self, name, content_type, filename, fileobj):
     """Save *fileobj* under *name*, passing a size hint to the filesystem.

     The size hint is taken from the payload length (for str/bytes), from
     seek/tell arithmetic (for seekable file objects), or falls back to the
     app's ``MAX_CONTENT_LENGTH``.  Failures are wrapped in a chained
     :exc:`StorageError`.
     """
     try:
         if isinstance(fileobj, (str, bytes)):
             # ``basestring`` does not exist on Python 3; accept both text
             # and binary payloads explicitly.
             size_hint = len(fileobj)
         elif hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'):
             pos = fileobj.tell()
             fileobj.seek(0, os.SEEK_END)
             size_hint = fileobj.tell() - pos
             fileobj.seek(pos, os.SEEK_SET)
         else:
             # Very unlikely to happen:
             # - strings/bytes have a length
             # - uploaded files are either fdopen (file-buffered) or BytesIO objects (-> seekable)
             # - other file-like objects are usually seekable too
             # So the only case where we can end up here is when using some custom file-like
             # object which does not have seek/tell methods.
             size_hint = current_app.config['MAX_CONTENT_LENGTH'] if current_app else None
         fs = self._get_xrootd_fs(size=size_hint)
         return self._save(name, fileobj, fs)
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not save "{name}": {exc}') from exc
Example #15
0
 def _check_bucket_secret(self):
     if not self.bucket_secret:
         raise StorageError(
             'A bucket secret is required when using dynamic bucket names')
Example #16
0
 def delete(self, file_id):
     """Remove the S3 object referenced by *file_id*."""
     target_bucket, target_key = self._parse_file_id(file_id)
     try:
         self.client.delete_object(Bucket=target_bucket, Key=target_key)
     except Exception as exc:
         raise StorageError(f'Could not delete "{file_id}": {exc}') from exc
Example #17
0
 def __init__(self, data):
     """Initialize static-bucket S3 storage and validate the bucket name."""
     super(S3Storage, self).__init__(data)
     self.bucket_name = self.parsed_data['bucket']
     # S3 limits bucket names to 63 characters; reject longer ones early.
     if len(self.bucket_name) > 63:
         raise StorageError('Bucket name cannot be longer than 63 chars')
Example #18
0
 def delete(self, file_id):
     """Remove the file from the XRootD filesystem.

     Raises :exc:`StorageError` (chained) if removal fails.
     """
     try:
         self._get_xrootd_fs().remove(self._resolve_path(file_id))
     except Exception as exc:
         # Python-3 chaining instead of the Python-2 three-expression raise.
         raise StorageError(f'Could not delete "{file_id}": {exc}') from exc