Example #1
    def test_upload_user_can_check_uploadcomplete(self, get_federation_token):
        client = Client()
        client.defaults[
            'HTTP_AUTHORIZATION'] = f'Token {self.everything_but_admin_user_token}'

        # Patch what get_federation_token() would return since we will not be calling any real AWS STS service.
        get_federation_token.return_value = self.fake_federation_token

        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/upload/?bucket={ALLOWED_BUCKET_NAME}'
        )
        self.assertEqual(response.status_code, 200)

        response_json = json.loads(response.content)
        file_location_id = response_json["locationid"]
        folder_name = response_json["foldername"]
        file_name = response_json["filename"]

        # Pretend to upload the file to S3.
        # upload_fileobj() expects a binary file-like object, so use BytesIO.
        file_content = io.BytesIO(b"abcdefghijklmnopqrst")

        s3 = awsClient(service='s3')
        s3.upload_fileobj(file_content, ALLOWED_BUCKET_NAME,
                          folder_name + '/' + file_name)

        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/uploadcomplete/?location={file_location_id}'
        )
        self.assertEqual(response.status_code, 200)
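
Note: each of these tests receives a `get_federation_token` argument, which implies the methods are wrapped in a `unittest.mock.patch` decorator omitted from the excerpts, and the "pretend" upload through `awsClient(service='s3')` implies the test class runs against a mocked S3 backend. A minimal sketch of such a setup using moto is shown below; the class name, bucket value, and region are assumptions for illustration and do not come from the original suite.

import boto3
from django.test import TestCase
from moto import mock_s3  # moto 5.x replaces the per-service decorators with mock_aws

ALLOWED_BUCKET_NAME = 'fileservice-test-bucket'  # assumed value; the real tests import theirs

class UploadDownloadTestCase(TestCase):
    """Hypothetical base class for the tests above; names are illustrative only."""

    def setUp(self):
        # Start the moto S3 mock so boto3 calls made via awsClient() never leave the process.
        self.s3_mock = mock_s3()
        self.s3_mock.start()
        self.addCleanup(self.s3_mock.stop)

        # Create the bucket the tests upload into; it exists only in moto's in-memory backend.
        boto3.client('s3', region_name='us-east-1').create_bucket(Bucket=ALLOWED_BUCKET_NAME)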
Example #2
    def test_notdownloader_user_cannot_get_download_url_for_uploaded_file(
            self, get_federation_token):
        # First have a superuser upload the file
        client = Client()
        client.defaults[
            'HTTP_AUTHORIZATION'] = f'Token {self.super_user_token}'

        # Patch what get_federation_token() would return since we will not be calling any real AWS STS service.
        get_federation_token.return_value = self.fake_federation_token

        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/upload/?bucket={ALLOWED_BUCKET_NAME}'
        )
        self.assertEqual(response.status_code, 200)

        folder_name = json.loads(response.content)["foldername"]
        file_name = json.loads(response.content)["filename"]

        # Pretend to upload the file to S3.
        # upload_fileobj() expects a binary file-like object, so use BytesIO.
        file_content = io.BytesIO(b"abcdefghijklmnopqrst")

        s3 = awsClient(service='s3')
        s3.upload_fileobj(file_content, ALLOWED_BUCKET_NAME,
                          folder_name + '/' + file_name)

        # Then have a readonly user try to download it.
        client.defaults[
            'HTTP_AUTHORIZATION'] = f'Token {self.readonly_user_token}'
        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/download/')
        self.assertEqual(response.status_code, 403)
Example #3
    def superuser_uploads_and_downloads_file(self, get_federation_token):
        """
        A helper method to quickly have a superuser upload and download a file.
        """

        client = Client()
        client.defaults[
            'HTTP_AUTHORIZATION'] = f'Token {self.super_user_token}'

        # Patch what get_federation_token() would return since we will not be calling any real AWS STS service.
        get_federation_token.return_value = self.fake_federation_token

        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/upload/?bucket={ALLOWED_BUCKET_NAME}'
        )
        self.assertEqual(response.status_code, 200)

        folder_name = json.loads(response.content)["foldername"]
        file_name = json.loads(response.content)["filename"]

        # Pretend to upload the file to S3.
        # upload_fileobj() expects a binary file-like object, so use BytesIO.
        file_content = io.BytesIO(b"abcdefghijklmnopqrst")

        s3 = awsClient(service='s3')
        s3.upload_fileobj(file_content, ALLOWED_BUCKET_NAME,
                          folder_name + '/' + file_name)

        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/download/')
Example #4
    def test_download_request_creates_download_log(self, get_federation_token):

        # First have a superuser upload the file
        client = Client()
        client.defaults[
            'HTTP_AUTHORIZATION'] = f'Token {self.super_user_token}'

        # Patch what get_federation_token() would return since we will not be calling any real AWS STS service.
        get_federation_token.return_value = self.fake_federation_token

        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/upload/?bucket={ALLOWED_BUCKET_NAME}'
        )
        self.assertEqual(response.status_code, 200)

        folder_name = json.loads(response.content)["foldername"]
        file_name = json.loads(response.content)["filename"]

        # Pretend to upload the file to S3.
        # upload_fileobj() expects a binary file-like object, so use BytesIO.
        file_content = io.BytesIO(b"abcdefghijklmnopqrst")
        s3 = awsClient(service='s3')
        s3.upload_fileobj(file_content, ALLOWED_BUCKET_NAME,
                          folder_name + '/' + file_name)

        # Then have a downloader user try to download it.
        client.defaults[
            'HTTP_AUTHORIZATION'] = f'Token {self.everything_but_admin_user_token}'
        response = client.get(
            f'/filemaster/api/file/{self.accessible_file_uuid}/download/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("url" in json.loads(response.content))

        # Then check to see if a download log was created.
        archivefile = ArchiveFile.objects.get(uuid=self.accessible_file_uuid)
        download_log = DownloadLog.objects.filter(
            archivefile=archivefile,
            requesting_user=self.everything_but_admin_user)
        self.assertTrue(download_log.exists())
Example #5
    def post(self, request, uuid=None):

        # Get the file record
        try:
            archive_file = ArchiveFile.objects.get(uuid=uuid)
        except ObjectDoesNotExist:
            return HttpResponseNotFound()

        # Check permissions
        if not request.user.has_perm('filemaster.upload_archivefile',
                                     archive_file):
            return HttpResponseForbidden()

        # Pull request parameters
        try:
            expires = int(self.request.query_params.get('expires', '10'))
        except ValueError:
            return HttpResponseBadRequest(
                'Parameter for expiration is not a valid integer')
        bucket = self.request.query_params.get('bucket')

        # If no bucket specified, default to first created
        if not bucket:
            try:
                bucket = next(iter(settings.BUCKETS))
            except Exception as e:
                log.exception(f'Error finding default bucket: {e}',
                              exc_info=True,
                              extra={'request': request})
                return HttpResponseBadRequest(
                    'No default bucket has been configured for Fileservice; must '
                    'specify a bucket in the request')

        try:
            # Check bucket perms
            if not request.user.has_perm('filemaster.write_bucket',
                                         Bucket.objects.get(name=bucket)):
                return HttpResponseForbidden(
                    f'User does not have permissions on Bucket "{bucket}"')
        except Bucket.DoesNotExist:
            return HttpResponseNotFound(
                f'Bucket "{bucket}" does not exist in Fileservice')

        # Check for extra conditions
        conditions = []
        try:
            conditions_b64 = self.request.query_params.get('conditions')
            if conditions_b64:
                # Decode and load
                conditions = json.loads(
                    base64.b64decode(conditions_b64.encode()).decode())

                log.debug('Extra conditions: {}'.format(conditions))

        except Exception as e:
            log.exception('Conditions error: {}'.format(e),
                          exc_info=True,
                          extra={
                              'conditions':
                              self.request.query_params.get('conditions'),
                          })

        # Generate a folder name
        folder_name = str(uuid4())

        # Build the key
        key = folder_name + "/" + archive_file.filename

        # Get region
        region = awsBucketRegion(bucket)

        # Generate the post
        s3 = awsClient(service='s3', region=region)

        post = s3.generate_presigned_post(
            Bucket=bucket,
            Key=key,
            ExpiresIn=expires,
            Conditions=conditions,
        )

        # Form the URL to the file
        url = "S3://%s/%s" % (bucket, key)
        log.debug('Url: {}'.format(url))

        # Register file
        file_location = FileLocation(url=url, storagetype='s3')
        file_location.save()
        archive_file.locations.add(file_location)

        # Return the POST to the uploader
        return Response({'post': post, 'locationid': file_location.id})
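
The view returns the raw result of boto3's generate_presigned_post(), a dict containing a 'url' and the form 'fields' that must accompany the upload, alongside the new location id. A minimal sketch of how a client might consume that response is shown below; the base URL, route, token, bucket name, and use of the requests library are assumptions for illustration rather than details from the original code.

import base64
import json

import requests  # assumed to be available on the client

# Hypothetical values; in practice these come from your Fileservice deployment.
BASE_URL = 'https://fileservice.example.com'
TOKEN = 'your-api-token'
file_uuid = '00000000-0000-0000-0000-000000000000'

# Optional extra POST-policy conditions, base64-encoded JSON as the view expects.
conditions = base64.b64encode(
    json.dumps([["content-length-range", 0, 1048576]]).encode()).decode()

# Ask Fileservice for a presigned POST (the view shown above; the route is assumed).
resp = requests.post(
    f'{BASE_URL}/filemaster/api/file/{file_uuid}/post/',
    params={'bucket': 'my-bucket', 'expires': 600, 'conditions': conditions},
    headers={'Authorization': f'Token {TOKEN}'},
)
resp.raise_for_status()
post = resp.json()['post']

# Upload directly to S3: send the returned fields plus the file itself as
# multipart form data to the returned URL.
with open('data.bin', 'rb') as f:
    upload = requests.post(post['url'], data=post['fields'], files={'file': f})
upload.raise_for_status()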