def __init__(self, settings):
    """Build an S3-backed storage from configuration.

    Reads the AWS access key id, secret access key and region from
    *settings* via ``read_setting``, creates a boto3 ``Session``-backed
    S3 resource, and binds the configured bucket via ``_set_bucket``.
    """
    self.access_key_id = read_setting(settings, 's3.access_key_id')
    self.secret_access_key = read_setting(settings, 's3.secret_access_key')
    session = Session(
        aws_access_key_id=self.access_key_id,
        aws_secret_access_key=self.secret_access_key,
        region_name=read_setting(settings, 's3.region_name'))
    # Removed commented-out fallback (`resource('s3')`): credentials must
    # come from settings, so the session-scoped resource is the one used.
    self.s3 = session.resource('s3')
    self._set_bucket(settings)
def _allow_storage_of(self, bytes_io, metadata):
    """Validate *metadata* before a file is stored.

    Subclasses may override this to veto additional files; a veto is
    expressed by raising ``FileNotAllowed`` with an explanatory message.
    Checks performed here: optional maximum size (``fls.max_file_size``)
    and optional rejection of empty files (``fls.allow_empty_files``).
    """
    config = self.orchestrator.settings
    length = metadata['length']

    limit = read_setting(config, 'fls.max_file_size', default=None)
    if limit is not None:
        limit = int(limit)
        if length > limit:
            raise FileNotAllowed(
                'The file is {} KB long and the maximum is {} KB.'.format(
                    int(length / 1024), int(limit / 1024)))

    empty_ok = asbool(read_setting(
        config, 'fls.allow_empty_files', default=False))
    if length == 0 and not empty_ok:
        raise FileNotAllowed('The file is empty.')
def __init__(self, settings):
    """Build a local-filesystem storage from configuration.

    Reads ``local.storage_path`` from *settings*, resolves it to an
    absolute ``Path``, and ensures the directory exists.
    """
    self.storage_path = read_setting(settings, "local.storage_path")
    self.directory = resolve_path(self.storage_path).absolute()
    # exist_ok=True avoids the exists()/mkdir() TOCTOU race: with the
    # original check-then-create, a concurrent process creating the
    # directory between the two calls would make mkdir() raise.
    self.directory.mkdir(parents=True, exist_ok=True)
def _set_bucket(self, settings):
    """Read the configured bucket name and bind the S3 Bucket handle."""
    name = read_setting(settings, 's3.bucket')
    self.bucket_name = name
    self.bucket = self.s3.Bucket(name)