Example #1
    def new_file(self, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        # If file size is larger than the maximum allowed, then abort
        if MAX_UPLOAD_SIZE:
            if self.content_length > MAX_UPLOAD_SIZE:
                raise UploadFailed('File too large')
        
        super().new_file(*args, **kwargs)
        self.parts = []
        self.bucket_name = settings.AWS_STORAGE_BUCKET_NAME
        self.s3_key = generate_object_key(self.request, self.file_name)
        self.client = s3_client()
        self.multipart = self.client.create_multipart_upload(
            Bucket=self.bucket_name,
            Key=self.s3_key
        )
        self.upload_id = self.multipart['UploadId']
        self.executor = ThreadedS3ChunkUploader(
            self.client,
            self.bucket_name,
            key=self.s3_key,
            upload_id=self.upload_id
        )

        # prepare a storages object as a file placeholder
        self.storage = S3Boto3Storage()
        self.file = S3Boto3StorageFile(self.s3_key, 'w', self.storage)
        self.file.original_name = self.file_name
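For context, Django's upload handler API pairs new_file() with a receive_data_chunk() hook that feeds incoming data to the handler. A minimal sketch of that hook, assuming ThreadedS3ChunkUploader.add() queues raw bytes for upload as multipart parts (the add() name is taken from the file_complete() shown in a later example; the exact buffering behaviour is not part of the original snippet):

    def receive_data_chunk(self, raw_data, start):
        # Hand the chunk to the background uploader; returning None tells
        # Django that this handler consumed the data.
        self.executor.add(raw_data)
        return None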
Example #2
    def isfile(self, name):
        try:
            name = self._normalize_name(self._clean_name(name))
            # Opening the key raises if it does not exist in the bucket
            f = S3Boto3StorageFile(name, 'rb', self)
            if not f:
                return False
            return True
        except Exception:
            return False
Example #3
    def isfile(self, name):
        try:
            name = self._normalize_name(self._clean_name(name))
            f = S3Boto3StorageFile(name, 'rb', self)
            # Keys that S3 reports with a directory content type are not files
            if "directory" in f.obj.content_type:
                return False
            return True
        except Exception:
            return False
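The two isfile() variants above differ only in how a successfully opened key is classified: the first treats any key that can be opened as a file, while the second also inspects the object's Content-Type and rejects keys that S3 reports as directory placeholders. A hypothetical call site, assuming the method is defined on an S3Boto3Storage subclass (MyS3Storage is illustrative, not from the original):

storage = MyS3Storage()
if storage.isfile('uploads/report.pdf'):
    print('key exists and is a regular file')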
Example #4
def read_theme_file_from_s3(file_path):
    """
    Read a theme file from S3.
    The file_path should be the relative path in the media directory.

    Example:
    file_content = read_theme_file_from_s3('themename/templates/default.html')
    """
    # the THEME_S3_PATH is where the theme files are stored.
    file_path = '%s/%s' % (settings.THEME_S3_PATH, str(file_path).lstrip('/'))
    storage = S3Boto3Storage()
    f = S3Boto3StorageFile(file_path, 'r', storage)
    content = f.read()
    f.close()

    return content
Example #5
def read_media_file_from_s3(file_path):
    """
    Read a media file from S3.
    The file_path should be the relative path in the media directory.

    Example:
    file_content = read_media_file_from_s3('/files/99/Earth-and-Moon.gif')
    """
    # the DEFAULT_S3_PATH is where the media files are stored.
    file_path = '%s/%s' % (settings.DEFAULT_S3_PATH,
                           str(file_path).lstrip('/'))
    storage = S3Boto3Storage()
    f = S3Boto3StorageFile(file_path, 'r', storage)
    content = f.read()
    f.close()

    return content
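Examples #4 and #5 are identical apart from the settings prefix that is prepended to the key (THEME_S3_PATH vs. DEFAULT_S3_PATH). A minimal consolidation sketch, assuming the same imports and settings as above; since S3Boto3StorageFile inherits Django's File, it can be used as a context manager so the handle is closed automatically:

def read_file_from_s3(file_path, prefix=None):
    """Read a file from S3 under the given prefix (defaults to the media path)."""
    # Pass settings.THEME_S3_PATH for theme files.
    prefix = settings.DEFAULT_S3_PATH if prefix is None else prefix
    key = '%s/%s' % (prefix, str(file_path).lstrip('/'))
    storage = S3Boto3Storage()
    with S3Boto3StorageFile(key, 'r', storage) as f:
        return f.read()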
Example #6
    def test_image_from_hash(self):
        storage = S3Boto3Storage(bucket=self.bucket)
        s3_file = S3Boto3StorageFile(name=self.s3_image_key,
                                     mode='r',
                                     storage=storage)

        # Mock model Image field
        # S3Boto3StorageFile stores storage information in ._storage, which is not checked by
        # ImageFile during storage identification
        s3_file.storage = storage

        image_s3 = ImageFile(s3_file)

        # Test local part
        options = {}
        thumbnail = get_thumbnail(s3_file, self.geometry_string, **options)
        print('Generated thumbnail url: {}'.format(thumbnail.url))
        print('Thumbnail cache key: {}'.format(thumbnail.name))

        # Now, test shrinkmeister server (should be up and running)
        resp = requests.get(thumbnail.url)
        image = Image(blob=resp.content)
        self.assertEqual(image.width, 50)
        self.assertEqual(image.height, 38)

        thumbnail_from_cache = self.cache.get(thumbnail.name)
        self.assertNotEqual(thumbnail_from_cache,
                            None,
                            msg="No image in cache detected :(")
        # Fetch the thumbnail through the cached entry's URL and re-check its size
        resp = requests.get(thumbnail_from_cache.url)
        image = Image(blob=resp.content)
        self.assertEqual(image.width, 50)
        self.assertEqual(image.height, 38)

        url, ext = thumbnail_from_cache.url.rsplit('.', 1)
        x2_url = '{}@2x.{}'.format(url, ext)
        print('x2 url {}'.format(x2_url))
        resp = requests.get(x2_url)
        image = Image(blob=resp.content)
        self.assertEqual(image.width, 100)
        self.assertEqual(image.height, 75)
Example #7
    def get_redirect_url(self, *args, **kwargs):
        try:
            data = signing.loads(kwargs['hash'],
                                 key=settings.THUMBNAIL_SECRET_KEY)
            bucket = data['bucket']
            key = data['key']
            geometry_string = data['geometry_string']
        except KeyError:
            raise Http404()

        if 'options' in data:
            options = merge_with_defaults(data['options'])
        else:
            options = {}

        if 'cache_key' in data:
            cache_key = data['cache_key']
        else:
            # !!! WARNING !!!
            # Generating same cache_key on both sides may be tricky
            # Preferred way is to transfer cache_key from client side
            cache_key = generate_cache_key(bucket=bucket,
                                           key=key,
                                           geometry_string=geometry_string,
                                           **options)

        # Multiple calls for same image during thumbnail generation scenario
        cached_thumbnail = shrinkmeister_cache.get(cache_key, None)
        #        if cached_thumbnail:
        #            return cached_thumbnail.url

        client = boto3.client('s3')
        storage = S3Boto3Storage(bucket=bucket)
        s3_file = S3Boto3StorageFile(name=key, mode='r', storage=storage)
        # Mock model Image field
        # S3Boto3StorageFile stores storage information in ._storage, which is not checked by
        # ImageFile during storage identification
        s3_file.storage = storage
        options['cache_key'] = cache_key
        thumbnail = get_thumbnail(s3_file, geometry_string, **options)
        return thumbnail.url
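The view above expects kwargs['hash'] to be a payload serialized with Django's signing module under THUMBNAIL_SECRET_KEY. A hypothetical sketch of the client-side counterpart that produces such a hash (build_thumbnail_hash is illustrative and not part of the original):

from django.conf import settings
from django.core import signing

def build_thumbnail_hash(bucket, key, geometry_string, cache_key=None, **options):
    payload = {'bucket': bucket, 'key': key, 'geometry_string': geometry_string}
    if options:
        payload['options'] = options
    if cache_key is not None:
        # Preferred per the warning above: compute the cache key on the
        # client side and ship it with the payload.
        payload['cache_key'] = cache_key
    return signing.dumps(payload, key=settings.THUMBNAIL_SECRET_KEY)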
Example #8
    def new_file(self, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        # If content_length is provided and file size is larger than the maximum allowed, then abort
        # Note that as per Django docs, content_length is not guaranteed to be provided.
        if MAX_UPLOAD_SIZE and self.content_length:
            if self.content_length > MAX_UPLOAD_SIZE:
                raise UploadFailed('File too large')

        super().new_file(*args, **kwargs)
        self.parts = []
        self.bucket_name = AWS_STORAGE_BUCKET_NAME
        file_name = self.file_name
        if CLEAN_FILE_NAME:
            file_name = slugify(self.file_name)
        self.s3_key = generate_object_key(self.request, file_name)
        self.client = s3_client()
        self.multipart = self.client.create_multipart_upload(
            Bucket=self.bucket_name,
            Key=self.s3_key,
            ContentType=self.content_type,
        )
        self.upload_id = self.multipart['UploadId']
        self.executor = ThreadedS3ChunkUploader(
            self.client,
            self.bucket_name,
            key=self.s3_key,
            upload_id=self.upload_id
        )

        # prepare a storages object as a file placeholder
        self.storage = S3Boto3Storage()
        self.file = S3Boto3StorageFile(self.s3_key, 'w', self.storage)
        self.file.original_name = self.file_name
        self.file.content_type = self.content_type
    def file_complete(self, file_size):
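        # Passing None presumably acts as a flush/termination sentinel so the
        # uploader submits any remaining buffered data as the final part.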
        self.executor.add(None)

        # Wait for all threads to complete
        wait(
            self.executor.futures, return_when=concurrent.futures.ALL_COMPLETED
        )

        parts = self.executor.get_parts()

        self.s3_client.complete_multipart_upload(
            Bucket=AWS_STORAGE_BUCKET_NAME,
            Key=self.s3_key,
            UploadId=self.upload_id,
            MultipartUpload={"Parts": parts},
        )

        self.s3_client.copy_object(
            Bucket=AWS_STORAGE_BUCKET_NAME,
            CopySource=f"{AWS_STORAGE_BUCKET_NAME}/{self.s3_key}",
            Key=self.new_file_name,
            ContentType=self.content_type,
        )

        self.s3_client.delete_object(
            Bucket=AWS_STORAGE_BUCKET_NAME,
            Key=self.s3_key,
        )

        if "clam_av_results" in self.content_type_extra:
            for result in self.content_type_extra["clam_av_results"]:
                if result["file_name"] == self.file_name:
                    # Set AV headers
                    if result["av_passed"]:
                        self.s3_client.copy_object(
                            Bucket=AWS_STORAGE_BUCKET_NAME,
                            CopySource=f"{AWS_STORAGE_BUCKET_NAME}/{self.new_file_name}",
                            Key=self.new_file_name,
                            Metadata={
                                "av-scanned-at": result["scanned_at"].strftime(
                                    "%Y-%m-%d %H:%M:%S"
                                ),
                                "av-passed": "True",
                            },
                            ContentType=self.content_type,
                            MetadataDirective="REPLACE",
                        )
                    else:
                        # Remove file with virus from S3
                        self.s3_client.delete_object(
                            Bucket=AWS_STORAGE_BUCKET_NAME,
                            Key=self.new_file_name,
                        )

                        if CHUNK_UPLOADER_RAISE_EXCEPTION_ON_VIRUS_FOUND:
                            raise VirusFoundInFileException()
                        else:
                            return FileWithVirus(field_name=self.field_name)

        storage = S3Boto3Storage()
        file = S3Boto3StorageFile(self.new_file_name, "rb", storage)
        file.content_type = self.content_type
        file.original_name = self.file_name

        file.file_size = file_size
        file.close()

        return file
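Compared with Example #1, this new_file() also guards against a missing content_length, optionally slugifies the incoming file name, and forwards the ContentType to create_multipart_upload(). file_complete() then flushes the remaining chunks, completes the multipart upload, copies the object from the temporary key to its final name, deletes the temporary key, applies any ClamAV results passed in content_type_extra (re-copying the object with av metadata, or deleting it and rejecting the upload when a virus was found), and finally returns an S3Boto3StorageFile handle for the stored object.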