def handle(self, *args, **options):
        # type: (*Any, **str) -> None
        old_email = options['old_email']

        if options['new_email']:
            new_email = options['new_email']
        else:
            new_email = old_email

        gravatar_url = "https://secure.gravatar.com/avatar/%s?d=identicon" % (
            gravatar_hash(old_email), )
        gravatar_data = requests.get(gravatar_url).content
        gravatar_file = SimpleUploadedFile('gravatar.jpg', gravatar_data,
                                           'image/jpeg')

        try:
            user_profile = get_user_profile_by_email(old_email)
            upload_avatar_image(gravatar_file, user_profile, user_profile)
            user_profile.avatar_source = UserProfile.AVATAR_FROM_USER
            user_profile.save(update_fields=['avatar_source'])
        except UserProfile.DoesNotExist:
            raise CommandError("Could not find specified user for email %s" %
                               (old_email))

        if old_email != new_email:
            gravatar_file.seek(0)
            try:
                user_profile = get_user_profile_by_email(new_email)
                upload_avatar_image(gravatar_file, user_profile, user_profile)
                user_profile.avatar_source = UserProfile.AVATAR_FROM_USER
                user_profile.save(update_fields=['avatar_source'])
            except UserProfile.DoesNotExist:
                raise CommandError(
                    "Could not find specified user for email %s" % (new_email))
Example #2
    def handle(self, *args, **options):
        # type: (*Any, **str) -> None
        old_email = options['old_email']

        if options['new_email']:
            new_email = options['new_email']
        else:
            new_email = old_email

        gravatar_url = "https://secure.gravatar.com/avatar/%s?d=identicon" % (gravatar_hash(old_email),)
        gravatar_data = requests.get(gravatar_url).content
        gravatar_file = SimpleUploadedFile('gravatar.jpg', gravatar_data, 'image/jpeg')

        try:
            user_profile = get_user_profile_by_email(old_email)
        except UserProfile.DoesNotExist:
            try:
                user_profile = get_user_profile_by_email(new_email)
            except UserProfile.DoesNotExist:
                raise CommandError("Could not find specified user")

        upload_avatar_image(gravatar_file, user_profile, old_email)
        if old_email != new_email:
            gravatar_file.seek(0)
            upload_avatar_image(gravatar_file, user_profile, new_email)

        user_profile.avatar_source = UserProfile.AVATAR_FROM_USER
        user_profile.save(update_fields=['avatar_source'])
Example #3
    def test_comment_image_upload_unique_no_duplication(self):
        utils.login(self)
        img = io.BytesIO(
            b'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00'
            b'\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
        image_name = 'foo_image.gif'
        file = SimpleUploadedFile(
            image_name, img.read(), content_type='image/gif')

        response = self.client.post(
            reverse('spirit:comment:image-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'image': file})
        res = json.loads(response.content.decode('utf-8'))
        first_url = res['url']

        utils.cache_clear()
        file.seek(0)
        response = self.client.post(
            reverse('spirit:comment:image-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'image': file})
        res = json.loads(response.content.decode('utf-8'))
        second_url = res['url']

        self.assertNotEqual(first_url, second_url)
Example #4
    def test_comment_file_upload_unique_no_duplication(self):
        utils.login(self)
        pdf = io.BytesIO(
            b'%PDF-1.0\n1 0 obj<</Type/Catalog/Pages 2 0 R>>endobj 2 0 obj<</Type/Pages/Kids[3 0 R]/Count 1'
            b'>>endobj 3 0 obj<</Type/Page/MediaBox[0 0 3 3]>>endobj\nxref\n0 4\n0000000000 65535 f\n000000'
            b'0010 00000 n\n0000000053 00000 n\n0000000102 00000 n\ntrailer<</Size 4/Root 1 0 R>>\nstartxre'
            b'f\n149\n%EOF\n')
        file_name = 'foo.pdf'
        file = SimpleUploadedFile(
            file_name, pdf.read(), content_type='application/pdf')

        response = self.client.post(
            reverse('spirit:comment:file-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'file': file})
        res = json.loads(response.content.decode('utf-8'))
        first_url = res['url']

        utils.cache_clear()
        file.seek(0)
        response = self.client.post(
            reverse('spirit:comment:file-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'file': file})
        res = json.loads(response.content.decode('utf-8'))
        second_url = res['url']

        self.assertNotEqual(first_url, second_url)
Example #5
    def test_comment_file_upload_unique_no_duplication(self):
        utils.login(self)
        pdf = BytesIO(
            b'%PDF-1.0\n1 0 obj<</Type/Catalog/Pages 2 0 R>>endobj 2 0 obj<</Type/Pages/Kids[3 0 R]/Count 1'
            b'>>endobj 3 0 obj<</Type/Page/MediaBox[0 0 3 3]>>endobj\nxref\n0 4\n0000000000 65535 f\n000000'
            b'0010 00000 n\n0000000053 00000 n\n0000000102 00000 n\ntrailer<</Size 4/Root 1 0 R>>\nstartxre'
            b'f\n149\n%EOF\n')
        file_name = 'foo.pdf'
        file = SimpleUploadedFile(
            file_name, pdf.read(), content_type='application/pdf')

        response = self.client.post(
            reverse('spirit:comment:file-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'file': file})
        res = json.loads(response.content.decode('utf-8'))
        first_url = res['url']

        utils.cache_clear()
        file.seek(0)
        response = self.client.post(
            reverse('spirit:comment:file-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'file': file})
        res = json.loads(response.content.decode('utf-8'))
        second_url = res['url']

        self.assertNotEqual(first_url, second_url)
Example #6
    def test_comment_image_upload_unique_no_duplication(self):
        utils.login(self)
        img = BytesIO(
            b'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00'
            b'\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
        image_name = 'foo_image.gif'
        file = SimpleUploadedFile(
            image_name, img.read(), content_type='image/gif')

        response = self.client.post(
            reverse('spirit:comment:image-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'image': file})
        res = json.loads(response.content.decode('utf-8'))
        first_url = res['url']

        utils.cache_clear()
        file.seek(0)
        response = self.client.post(
            reverse('spirit:comment:image-upload-ajax'),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest',
            data={'image': file})
        res = json.loads(response.content.decode('utf-8'))
        second_url = res['url']

        self.assertNotEqual(first_url, second_url)
Example #7
class FileProxyTest(TestCase):
    def setUp(self):
        self.dummy_file = SimpleUploadedFile(name="dummy.txt", content=b"I am content.\n")

    def test_create_file_proxy(self):
        """Test creation of `FileProxy` object."""
        fp = FileProxy.objects.create(name=self.dummy_file.name, file=self.dummy_file)

        # Now refresh it and make sure it was saved and retrieved correctly.
        fp.refresh_from_db()
        self.dummy_file.seek(0)  # Reset cursor since it was previously read
        self.assertEqual(fp.name, self.dummy_file.name)
        self.assertEqual(fp.file.read(), self.dummy_file.read())

    def test_delete_file_proxy(self):
        """Test deletion of `FileProxy` object."""
        fp = FileProxy.objects.create(name=self.dummy_file.name, file=self.dummy_file)

        # Assert counts before delete
        self.assertEqual(FileProxy.objects.count(), 1)
        self.assertEqual(FileAttachment.objects.count(), 1)

        # Assert counts after delete
        fp.delete()
        self.assertEqual(FileProxy.objects.count(), 0)
        self.assertEqual(FileAttachment.objects.count(), 0)
Example #8
 def test_download_comment_attachment_without_accel_redirect(self):
     tmp_file = SimpleUploadedFile('filename.txt', b'File content')
     comment = CommentFactory(userrequest=self.request, attachment=tmp_file)
     self._set_permissions([
         'can_comment_requests',
     ])
     response = self._get_comment_attachment(pk=comment.pk)
     self.assertEqual(status.HTTP_200_OK, response.status_code)
     self.assertEqual(f'attachment; filename={tmp_file.name}',
                      response.get('Content-Disposition'))
     tmp_file.seek(0)
     self.assertEqual(response.content, tmp_file.read())
Example #9
class GeometryFileFieldTestCase(SimpleTestCase):
    def setUp(self):
        self.field = GeometryFileField()
        self.fp = SimpleUploadedFile(
            'geom.json', json.dumps(_geom).encode('ascii'))
        self.fp.seek(0)

    def test_to_python(self):
        self.assertIsInstance(self.field.to_python(self.fp), OGRGeometry)
        fp = SimpleUploadedFile('empty.json', b'{}')
        self.assertRaises(forms.ValidationError, self.field.to_python, fp)

    def test_feature_to_python(self):
        feature = Feature(geometry=_geom)
        self.fp.write(str(feature).encode('ascii'))
        self.fp.seek(0)
        v = self.field.to_python(self.fp)
        self.assertIsInstance(v, OGRGeometry)

    def test_shapefile(self):
        base = 'dir/geofield.shp'
        path = default_storage.path(base)
        os.mkdir(os.path.dirname(path))
        write_shp(path, _geom)
        b = io.BytesIO()
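        # Zip the .shp together with its sidecar files (.dbf, .prj, .shx) so the field receives a complete dataset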
        with zipfile.ZipFile(b, 'w') as zf:
            for ext in ('dbf', 'prj', 'shp', 'shx'):
                fname = base.replace('shp', ext)
                with default_storage.open(fname) as fp:
                    zf.writestr(fname, fp.read())
        shutil.rmtree(os.path.dirname(path))
        upfile = SimpleUploadedFile('geofield.zip', b.getvalue())
        b.close()
        result = self.field.to_python(upfile)
        self.assertIsInstance(result, OGRGeometry)
        self.assertIsNotNone(result.srs)

    def test_zipfile(self):
        zfile = io.BytesIO()
        with zipfile.ZipFile(zfile, 'w') as zf:
            zf.writestr(self.fp.name, self.fp.read())
        zfile.seek(0)
        upfile = SimpleUploadedFile('geofield.zip', zfile.read())
        zfile.close()
        self.assertIsInstance(self.field.to_python(upfile), OGRGeometry)

    def tearDown(self):
        self.fp.close()
Example #11
 def test_should_reject_patch_with_capture_in_progress(self):
     with open(os.path.join(TEST_ASSETS_DIR, 'target_capture_files', 'test.pdf'), 'rb') as test_file:
         data = test_file.read()
         file_content = SimpleUploadedFile("test.pdf", data, content_type="application/pdf")
         self.rejected_patch(self.in_progress_link_url,
                             user=self.registrar_user,
                             format="multipart",
                             data={'file': file_content},
                             expected_status_code=400)
         file_content.seek(0)
         self.in_progress_link.capture_job.status = 'failed'
         self.in_progress_link.capture_job.save()
         self.successful_patch(self.in_progress_link_url,
                               user=self.registrar_user,
                               format="multipart",
                               data={'file': file_content})
Example #12
    def setUp(self) -> None:
        block_size = 65536
        imgfile = open(os.path.join(test_sample_storage, 'testimg.jpg'), 'rb')
        testimg = SimpleUploadedFile(imgfile.name, imgfile.read(), content_type="image/jpeg")
        testimg.seek(0)
        hasher = md5()
        imgfile.seek(0)  # rewind: building the upload above consumed the whole file
        for buf in iter(partial(imgfile.read, block_size), b''):
            hasher.update(buf)
        imgfile.close()
        imgfile = ImageFile(testimg)
        self.hasher = hasher.hexdigest()
        self.original_w = imgfile.width
        self.original_h = imgfile.height
        img = Image.objects.create(name=testimg.name, photo=testimg, img_hash=self.hasher)
        img.save()

        self.list_resize = [{"width": 100, "height": 100},
                            {"width": 10, "height": 100},
                            {"width": 100, "height": 10},
                            {"width": 7680, "height": 4320},
                            {"size": 500000},
                            {"width": 100, "height": 100, "size": 500000},
                            {"width": 100, "height": 100, "size": 200000}, ]
Example #13
class SimpleTest(TestCase):
    def setUp(self):
        self.u1 = User.objects.create(username='******')
        self.upload = SimpleUploadedFile('fake.pdf', b'This is a fake pdf')
        self.upload2 = SimpleUploadedFile('fake2.pdf', b'This is a second fake pdf')

    def test_document_upload(self):
        """
        Tests that we can upload a file and get it back
        """
        doc = uploaded_new_document(self.upload)
        doc.title = 'File uploaded'
        doc.author = self.u1
        doc.file.read()
        doc.save()

        doc = Document.objects.get(title='File uploaded')
        self.upload.seek(0)
        self.assertEqual(doc.file.read(), self.upload.read())

    def test_derived_document_upload(self):
        """
        Test derived file upload
        """
        doc = uploaded_new_document(self.upload)
        doc.title = 'File uploaded'
        doc.author = self.u1
        doc.save()

        derived = uploaded_new_derived_document(self.upload2)
        derived.derived_from = doc._blob
        derived.index = 0
        derived.save()

        derived2 = doc.get_derived_documents_of_type('pdf')[0]
        self.assertEqual(derived, derived2)
        self.upload2.seek(0)
        self.assertEqual(derived2.file.read(), self.upload2.read())

    def test_png_container(self):

        doc = uploaded_new_document(self.upload)
        doc.title = 'File uploaded'
        doc.author = self.u1
        doc.save()

        self.upload.name = 'fake.png'
        derived = uploaded_new_derived_document(self.upload)
        derived.derived_from = doc._blob
        derived.index = 0
        derived.save()

    def test_orphaned_blobs(self):
        """Make sure that deleting a Document does not result in orphanded blobs"""
        doc = uploaded_new_document(self.upload)
        doc.title = 'File uploaded'
        doc.author = self.u1
        doc.save()

        self.upload.name = 'fake.png'
        derived = uploaded_new_derived_document(self.upload)
        derived.derived_from = doc._blob
        derived.index = 0
        derived.save()

        DerivedDocument.objects.all().delete()
        self.assertEqual(DerivedBlob.objects.count(), 0)

        Document.objects.all().delete()
        self.assertEqual(ParentBlob.objects.count(), 0)

    def test_deleted_derived(self):
        """
        Make sure that derived documents are deleted when parent document and
        blob are deleted.
        """
        doc = uploaded_new_document(self.upload)
        doc.title = 'File uploaded'
        doc.author = self.u1
        doc.save()

        self.upload.name = 'fake.png'
        derived = uploaded_new_derived_document(self.upload)
        derived.derived_from = doc._blob
        derived.index = 0
        derived.save()

        Document.objects.all().delete()
        self.assertEqual(DerivedBlob.objects.count(), 0)

    def test_files_are_deleted(self):
        """Make sure file is deleted when blob is"""
        doc = uploaded_new_document(self.upload)
        doc.title = 'File uploaded'
        doc.author = self.u1
        doc.save()
        name = doc._blob.file.name

        from django.core.files.storage import get_storage_class
        storage = get_storage_class()()

        doc.delete()
        self.assertFalse(storage.exists(name))

    def test_auto_blob_creation(self):
        """
        Test that a blob is created when using the file attribute on a document.
        """
        doc = Document(title='New Doc', file_name='A File', file=self.upload,
                                                                author=self.u1)
        doc.save()
        doc.file.seek(0)
        self.upload.seek(0)

        self.assertEqual(doc.file.read(), self.upload.read())
        self.assertEqual(ParentBlob.objects.count(), 1)

        doc.file.seek(0)
        self.upload.seek(0)

        self.assertEqual(ParentBlob.objects.all()[0].file.read(),
                                                        self.upload.read())

    def test_auto_derived_blob_creation(self):
        """
        Test that a blob is created when using the file attribute on a derived
        document.
        """
        doc = Document(title='New Doc', file_name='A File', file=self.upload,
                                                                author=self.u1)
        doc.save()
        derived = DerivedDocument(derived_from=doc._blob, index=0,
                                                        file=self.upload2)
        derived.save()

        self.upload2.seek(0)
        self.upload.seek(0)

        self.assertEqual(derived.file.read(), self.upload2.read())
        self.assertEqual(DerivedBlob.objects.count(), 1)

        self.upload2.seek(0)
        derived.file.seek(0)

        self.assertEqual(DerivedBlob.objects.all()[0].file.read(),
                                                        self.upload2.read())

    def test_read_only_file(self):

        doc = Document(title='New Doc', file_name='A File', file=self.upload,
                                                                author=self.u1)
        doc.save()

        self.assertRaises(ReadOnlyFileError, doc.file.delete)
        self.assertRaises(ReadOnlyFileError, doc.file.write, 'test')
        self.assertRaises(ReadOnlyFileError, doc.file.writelines, 'test')

    def tearDown(self):
        self.u1.delete()
        Document.objects.all().delete()
Example #14
 def _open(self, name, mode="rb"):
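     # Use an empty SimpleUploadedFile as an in-memory target for the Azure blob
     # download, then rewind it so callers can read from the beginning.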
     stream = SimpleUploadedFile(name, None)
     self.connection.get_blob_to_file(self.azure_container, name, stream)
     stream.seek(0)
     return stream
Example #15
def test_upload_file(api_client, data_fixture, tmpdir):
    user, token = data_fixture.create_user_and_token(email="*****@*****.**",
                                                     password="******",
                                                     first_name="Test1")

    response = api_client.post(
        reverse("api:user_files:upload_file"),
        format="multipart",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_INVALID_FILE"

    response = api_client.post(
        reverse("api:user_files:upload_file"),
        data={"file": ""},
        format="multipart",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_INVALID_FILE"

    old_limit = settings.USER_FILE_SIZE_LIMIT
    settings.USER_FILE_SIZE_LIMIT = 6
    response = api_client.post(
        reverse("api:user_files:upload_file"),
        data={"file": SimpleUploadedFile("test.txt", b"Hello World")},
        format="multipart",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    settings.USER_FILE_SIZE_LIMIT = old_limit
    assert response.status_code == HTTP_413_REQUEST_ENTITY_TOO_LARGE
    assert response.json()["error"] == "ERROR_FILE_SIZE_TOO_LARGE"
    assert response.json()["detail"] == (
        "The provided file is too large. Max 0MB is allowed.")

    response = api_client.post(
        reverse("api:user_files:upload_file"),
        data={"file": "not a file"},
        format="multipart",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_INVALID_FILE"

    storage = FileSystemStorage(location=str(tmpdir),
                                base_url="http://localhost")

    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        with freeze_time("2020-01-01 12:00"):
            file = SimpleUploadedFile("test.txt", b"Hello World")
            response = api_client.post(
                reverse("api:user_files:upload_file"),
                data={"file": file},
                format="multipart",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json["size"] == 11
    assert response_json["mime_type"] == "text/plain"
    assert response_json["is_image"] is False
    assert response_json["image_width"] is None
    assert response_json["image_height"] is None
    assert response_json["uploaded_at"] == "2020-01-01T12:00:00Z"
    assert response_json["thumbnails"] is None
    assert response_json["original_name"] == "test.txt"
    assert "localhost:8000" in response_json["url"]

    user_file = UserFile.objects.all().last()
    assert user_file.name == response_json["name"]
    assert response_json["url"].endswith(response_json["name"])
    file_path = tmpdir.join("user_files", user_file.name)
    assert file_path.isfile()

    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response_2 = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    # The old file should be provided.
    assert response_2.json()["name"] == response_json["name"]
    assert response_json["original_name"] == "test.txt"

    image = Image.new("RGB", (100, 140), color="red")
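    # Start from an empty in-memory upload, render the PIL image into it as PNG,
    # then rewind so the POST body contains the image bytes.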
    file = SimpleUploadedFile("test.png", b"")
    image.save(file, format="PNG")
    file.seek(0)

    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json["mime_type"] == "image/png"
    assert response_json["is_image"] is True
    assert response_json["image_width"] == 100
    assert response_json["image_height"] == 140
    assert len(response_json["thumbnails"]) == 1
    assert "localhost:8000" in response_json["thumbnails"]["tiny"]["url"]
    assert "tiny" in response_json["thumbnails"]["tiny"]["url"]
    assert response_json["thumbnails"]["tiny"]["width"] == 21
    assert response_json["thumbnails"]["tiny"]["height"] == 21
    assert response_json["original_name"] == "test.png"

    user_file = UserFile.objects.all().last()
    file_path = tmpdir.join("user_files", user_file.name)
    assert file_path.isfile()
    file_path = tmpdir.join("thumbnails", "tiny", user_file.name)
    assert file_path.isfile()
    thumbnail = Image.open(file_path.open("rb"))
    assert thumbnail.height == 21
    assert thumbnail.width == 21
Example #17
def fake_uploaded_file(file_path, mime_type):
    with open(file_path, 'rb') as fh:
        suf = SimpleUploadedFile(file_path.split('/')[-1], fh.read(), mime_type)
        suf.seek(0)
        return suf
Example #18
class LargeFileAPITests(MockValidationsMixin, JWTAuthMixin, APITestCase):

    informatieobjecttype = INFORMATIEOBJECTTYPE
    scopes = [
        SCOPE_DOCUMENTEN_LOCK,
        SCOPE_DOCUMENTEN_AANMAKEN,
        SCOPE_DOCUMENTEN_ALLES_LEZEN,
        SCOPE_DOCUMENTEN_ALLES_VERWIJDEREN,
        SCOPE_DOCUMENTEN_BIJWERKEN,
    ]

    def _create_metadata(self):
        self.file_content = SimpleUploadedFile("file.txt", b"filecontentstring")
        content = {
            "identificatie": uuid.uuid4().hex,
            "bronorganisatie": "159351741",
            "creatiedatum": "2018-06-27",
            "titel": "detailed summary",
            "auteur": "test_auteur",
            "formaat": "txt",
            "taal": "eng",
            "bestandsnaam": "dummy.txt",
            "bestandsomvang": self.file_content.size,
            "link": "http://een.link",
            "beschrijving": "test_beschrijving",
            "informatieobjecttype": INFORMATIEOBJECTTYPE,
            "vertrouwelijkheidaanduiding": VertrouwelijkheidsAanduiding.openbaar,
        }
        list_url = reverse(EnkelvoudigInformatieObject)

        response = self.client.post(list_url, content)

        self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data)

        self.eio = EnkelvoudigInformatieObject.objects.get()
        self.canonical = self.eio.canonical
        data = response.json()

        self.assertEqual(
            self.eio.vertrouwelijkheidaanduiding, VertrouwelijkheidsAanduiding.openbaar
        )
        self.assertEqual(self.eio.titel, "detailed summary")
        self.assertEqual(self.eio.inhoud, "")
        self.assertEqual(self.canonical.bestandsdelen.count(), 2)
        self.assertEqual(data["locked"], True)
        self.assertEqual(data["lock"], self.canonical.lock)

        self.bestandsdelen = self.canonical.bestandsdelen.order_by("volgnummer").all()

        for part in self.bestandsdelen:
            self.assertEqual(part.voltooid, False)
            self.assertEqual(part.inhoud, "")
        self.assertEqual(self.bestandsdelen[0].omvang, settings.CHUNK_SIZE)
        self.assertEqual(
            self.bestandsdelen[1].omvang, self.file_content.size - settings.CHUNK_SIZE
        )

    def _upload_part_files(self):
        part_files = split_file(self.file_content, settings.CHUNK_SIZE)
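        # Split the original upload into CHUNK_SIZE-sized pieces, one per bestandsdeel created for the document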
        for part in self.bestandsdelen:
            part_url = get_operation_url("bestandsdeel_update", uuid=part.uuid)

            response = self.client.put(
                part_url,
                {"inhoud": part_files.pop(0), "lock": self.canonical.lock},
                format="multipart",
            )

            self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

            part.refresh_from_db()

            self.assertNotEqual(part.inhoud, "")
            self.assertEqual(part.voltooid, True)

    def _unlock(self):
        unlock_url = get_operation_url(
            "enkelvoudiginformatieobject_unlock", uuid=self.eio.uuid
        )

        response = self.client.post(unlock_url, {"lock": self.canonical.lock})

        self.assertEqual(
            response.status_code, status.HTTP_204_NO_CONTENT, response.data
        )

        self.canonical.refresh_from_db()
        self.eio.refresh_from_db()

        self.assertEqual(self.canonical.bestandsdelen.count(), 0)
        self.assertNotEqual(self.eio.inhoud.path, "")
        self.assertEqual(self.eio.inhoud.size, self.file_content.size)

    def _download_file(self):
        file_url = get_operation_url(
            "enkelvoudiginformatieobject_download", uuid=self.eio.uuid
        )

        response = self.client.get(file_url)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.content, b"filecontentstring")

    def test_create_eio_full_process(self):
        """
        Test the create process of the documents with part files

        1. Create document metadata
        Input:
        * inhoud - None
        * bestandsomvang > 0

        Expected result:
        * document is already locked after creation
        * file link is None
        * bestandsdelen objects are created based on the bestandsomvang

        2. Upload part files
        Input:
        * part files which are the result of splitting the initial file
        * lock

        Expected result:
        * for all part files voltooid = True

        3. Unlock document
        Expected result:
        * part files merged into the whole file
        * file link points to this file
        * bestandsdelen objects are deleted

        4. Download file
        Expected result:
        * file is downloadable via the file link
        """

        self._create_metadata()
        self._upload_part_files()
        self._unlock()
        self._download_file()

    def test_upload_part_wrong_size(self):
        """
        Test the upload of the incorrect part file

        Input:
        * part files with the size different from grootte field
        * lock

        Expected result:
        * 400 status because of the difference between expected and actual file sizes
        """
        self._create_metadata()

        # change file size for part file
        part = self.bestandsdelen[0]
        part.omvang = part.omvang + 1
        part.save()

        # upload part file
        part_url = get_operation_url("bestandsdeel_update", uuid=part.uuid)
        part_file = split_file(self.file_content, settings.CHUNK_SIZE)[0]

        response = self.client.put(
            part_url,
            {"inhoud": part_file, "lock": self.canonical.lock},
            format="multipart",
        )

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        error = get_validation_errors(response, "nonFieldErrors")
        self.assertEqual(error["code"], "file-size")

    def test_upload_part_twice_correct(self):
        """
        Test the upload of the same part file several times

        Input:
        * part file
        * lock

        Expected result:
        * the repeated upload of the same file is permitted. Voltooid = True
        """
        self._create_metadata()
        self._upload_part_files()

        # upload one of parts again
        self.file_content.seek(0)
        part_files = split_file(self.file_content, settings.CHUNK_SIZE)
        part = self.bestandsdelen[0]
        part_url = get_operation_url("bestandsdeel_update", uuid=part.uuid)

        response = self.client.put(
            part_url,
            {"inhoud": part_files[0], "lock": self.canonical.lock},
            format="multipart",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        part.refresh_from_db()

        self.assertNotEqual(part.inhoud, "")
        self.assertEqual(part.voltooid, True)

    def test_unlock_without_uploading(self):
        """
        Test the unlock of the document with no part files uploaded
        Input:
        * bestandsomvang of the document > 0
        * bestandsdelen objects are created but not uploaded

        Expected result:
        * 400 status because the expected size of the file > 0
        """
        self._create_metadata()

        # unlock
        unlock_url = get_operation_url(
            "enkelvoudiginformatieobject_unlock", uuid=self.eio.uuid
        )

        response = self.client.post(unlock_url, {"lock": self.canonical.lock})

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        error = get_validation_errors(response, "nonFieldErrors")
        self.assertEqual(error["code"], "file-size")

    def test_unlock_not_finish_upload(self):
        """
        Test the unlock of the document with not all part files uploaded
        Input:
        * bestandsomvang of the document > 0
        * bestandsdelen objects are created, some of them are uploaded

        Expected result:
        * 400 status because the upload of part files is incomplete
        """
        self._create_metadata()

        # upload 1 part of the file
        part_file = split_file(self.file_content, settings.CHUNK_SIZE)[0]
        part = self.bestandsdelen[0]
        part_url = get_operation_url("bestandsdeel_update", uuid=part.uuid)

        response = self.client.put(
            part_url,
            {"inhoud": part_file, "lock": self.canonical.lock},
            format="multipart",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        # unlock
        unlock_url = get_operation_url(
            "enkelvoudiginformatieobject_unlock", uuid=self.eio.uuid
        )

        response = self.client.post(unlock_url, {"lock": self.canonical.lock})

        self.assertEqual(
            response.status_code, status.HTTP_400_BAD_REQUEST, response.data
        )

        error = get_validation_errors(response, "nonFieldErrors")
        self.assertEqual(error["code"], "incomplete-upload")

    def test_unlock_not_finish_upload_force(self):
        """
        Test the unlock of the document with not all part files uploaded
        Input:
        * bestandsomvang of the document > 0
        * bestandsdelen objects are created, some of them are uploaded
        * client has 'documenten.geforceerd-unlock' scope

        Expected result:
        * document is unlocked
        * all bestandsdelen are deleted
        * bestandsomvang is None
        """
        self.autorisatie.scopes = self.autorisatie.scopes + [
            SCOPE_DOCUMENTEN_GEFORCEERD_UNLOCK
        ]
        self.autorisatie.save()
        self._create_metadata()

        # upload 1 part of the file
        part_file = split_file(self.file_content, settings.CHUNK_SIZE)[0]
        part = self.bestandsdelen[0]
        part_url = get_operation_url("bestandsdeel_update", uuid=part.uuid)

        response = self.client.put(
            part_url,
            {"inhoud": part_file, "lock": self.canonical.lock},
            format="multipart",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        # force unlock
        unlock_url = get_operation_url(
            "enkelvoudiginformatieobject_unlock", uuid=self.eio.uuid
        )

        response = self.client.post(unlock_url)

        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        self.eio.refresh_from_db()
        self.canonical.refresh_from_db()

        self.assertEqual(self.eio.bestandsomvang, None)
        self.assertEqual(self.canonical.bestandsdelen.count(), 0)

    def test_update_metadata_without_upload(self):
        """
        Test the update process of the document metadata
        Input:
        * updated fields don't include bestandsomvang and inhoud

        Expected result:
        * new version of document is not created during lock since the object was created with lock
        * bestandsdelen objects are created
        """
        self._create_metadata()

        # update file metadata
        eio_url = get_operation_url(
            "enkelvoudiginformatieobject_read", uuid=self.eio.uuid
        )

        response = self.client.patch(
            eio_url, {"beschrijving": "beschrijving2", "lock": self.canonical.lock}
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        data = response.json()
        self.eio.refresh_from_db()

        self.assertIsNone(data["inhoud"])  # the link to download is None
        self.assertEqual(len(data["bestandsdelen"]), 2)
        self.assertEqual(self.eio.beschrijving, "beschrijving2")

    def test_update_metadata_after_unfinished_upload(self):
        """
        Test the update process of the document metadata with some of part files uploaded
        Input:
        * updated fields don't include bestandsomvang and inhoud

        Expected result:
        * bestandsdelen objects are regenerated
        * all uploaded part files are lost
        """
        self._create_metadata()

        # upload 1 part of the file
        part_file = split_file(self.file_content, settings.CHUNK_SIZE)[0]
        part = self.bestandsdelen[0]
        part_url = get_operation_url("bestandsdeel_update", uuid=part.uuid)

        response = self.client.put(
            part_url,
            {"inhoud": part_file, "lock": self.canonical.lock},
            format="multipart",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        part.refresh_from_db()
        self.assertEqual(part.voltooid, True)

        # update metadata
        eio_url = get_operation_url(
            "enkelvoudiginformatieobject_read", uuid=self.eio.uuid
        )

        response = self.client.patch(
            eio_url, {"beschrijving": "beschrijving2", "lock": self.canonical.lock}
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.canonical.refresh_from_db()
        part_new = self.canonical.bestandsdelen.order_by("volgnummer").first()

        self.assertEqual(self.canonical.bestandsdelen.count(), 2)
        self.assertEqual(self.canonical.empty_bestandsdelen, True)
        self.assertEqual(part_new.voltooid, False)

    def test_update_metadata_set_size(self):
        """
        Test the update process of the file size with some of part files uploaded
        Input:
        * bestandsomvang > 0

        Expected result:
        * bestandsdelen objects are regenerated based on the new bestandsomvang
        * all uploaded part files are lost
        """
        self._create_metadata()

        # update file metadata
        eio_url = get_operation_url(
            "enkelvoudiginformatieobject_read", uuid=self.eio.uuid
        )

        response = self.client.patch(
            eio_url, {"bestandsomvang": 45, "lock": self.canonical.lock}
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        data = response.json()
        self.canonical.refresh_from_db()
        self.eio.refresh_from_db()

        self.assertEqual(self.eio.bestandsomvang, 45)
        self.assertEqual(self.canonical.bestandsdelen.count(), 5)
        self.assertEqual(data["inhoud"], None)

    def test_update_metadata_set_size_zero(self):
        """
        Test the update process of the file size = 0
        Input:
        * bestandsomvang = 0

        Expected result:
        * bestandsdelen objects are removed
        * empty file is created
        * file link points to this empty file
        """
        self._create_metadata()

        # update file metadata
        eio_url = get_operation_url(
            "enkelvoudiginformatieobject_read", uuid=self.eio.uuid
        )

        response = self.client.patch(
            eio_url, {"bestandsomvang": 0, "lock": self.canonical.lock}
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        data = response.json()
        self.canonical.refresh_from_db()
        self.eio.refresh_from_db()
        file_url = get_operation_url(
            "enkelvoudiginformatieobject_download", uuid=self.eio.uuid
        )

        self.assertEqual(self.eio.bestandsomvang, 0)
        self.assertEqual(self.canonical.bestandsdelen.count(), 0)
        self.assertEqual(data["inhoud"], f"http://testserver{file_url}?versie=1")

    def test_update_metadata_set_size_null(self):
        """
        Test the remove of file from the document
        Input:
        * bestandsomvang = None

        Expected result:
        * bestandsdelen objects are removed
        * file link is None
        """
        self._create_metadata()

        # update file metadata
        eio_url = get_operation_url(
            "enkelvoudiginformatieobject_read", uuid=self.eio.uuid
        )

        response = self.client.patch(
            eio_url, {"bestandsomvang": None, "lock": self.canonical.lock}
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)

        data = response.json()
        self.canonical.refresh_from_db()
        self.eio.refresh_from_db()

        self.assertEqual(self.eio.bestandsomvang, None)
        self.assertEqual(self.canonical.bestandsdelen.count(), 0)
        self.assertEqual(data["inhoud"], None)
Example #19
class AttachmentUploadTestCase(TestCase):
    def setUp(self):
        self.api_url = '/api/attachments'
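        # 2 ** 25 bytes = 32 MiB of dummy data, large enough to span many 2 MiB chunks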
        self.file = SimpleUploadedFile('test', b'0' * 2 ** 25)
        self.md5 = ''

    def tearDown(self):
        try:
            attachment = Attachment.objects.get(md5=self.md5)
        except Attachment.DoesNotExist:
            pass
        else:
            attachment.delete_file()

    def test_attachment_upload(self):
        HTTP_CONTENT_RANGE = 'bytes {start}-{end}/{total}'
        chunk_size = 2097152
        md5_value = md5()
        while True:
            data_flow = self.file.read(chunk_size)  # read 2 MB into memory per iteration
            if not data_flow:  # read() returns b'' once the file is exhausted
                break
            md5_value.update(data_flow)
        self.md5 = md5_value.hexdigest()

        url = self.api_url + '/' + self.md5

        start = 0
        end = 0
        self.file.open('rb')
        while True:
            start = end
            end = start + chunk_size
            data_flow = self.file.read(chunk_size)
            if not data_flow or end >= self.file._size / 2:  # simulate the upload breaking off halfway through
                break

            headers = {
                'HTTP_CONTENT_RANGE': HTTP_CONTENT_RANGE.format(
                    start=start,
                    end=end,
                    total=self.file._size
                )
            }
            res = self.client.put(url, encode_multipart(BOUNDARY, {
                'filename': self.file.name,
                'file': SimpleUploadedFile('test', data_flow)
            }), content_type=MULTIPART_CONTENT, **headers)
            self.assertEqual(res.status_code, 200)

        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)

        status = 1  # status code meaning the upload is still in progress
        offset = res.json()['offset']
        start = offset
        end = offset
        self.file.open('rb')
        self.file.seek(offset)
        while True:
            start = end
            end = start + chunk_size
            data_flow = self.file.read(chunk_size)
            if not data_flow:
                break

            headers = {
                'HTTP_CONTENT_RANGE': HTTP_CONTENT_RANGE.format(
                    start=start,
                    end=end,
                    total=self.file._size
                )
            }
            res = self.client.put(url, encode_multipart(BOUNDARY, {
                'filename': self.file.name,
                'file': SimpleUploadedFile('test', data_flow)
            }), content_type=MULTIPART_CONTENT, **headers)
            self.assertEqual(res.status_code, 200)
            status = res.json()['status']
        self.assertEqual(status, 2)
Example #20
def test_upload_file(api_client, data_fixture, tmpdir):
    user, token = data_fixture.create_user_and_token(email='*****@*****.**',
                                                     password='******',
                                                     first_name='Test1')

    response = api_client.post(reverse('api:user_files:upload_file'),
                               format='multipart',
                               HTTP_AUTHORIZATION=f'JWT {token}')
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_INVALID_FILE'

    response = api_client.post(reverse('api:user_files:upload_file'),
                               data={'file': ''},
                               format='multipart',
                               HTTP_AUTHORIZATION=f'JWT {token}')
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_INVALID_FILE'

    old_limit = settings.USER_FILE_SIZE_LIMIT
    settings.USER_FILE_SIZE_LIMIT = 6
    response = api_client.post(
        reverse('api:user_files:upload_file'),
        data={'file': SimpleUploadedFile('test.txt', b'Hello World')},
        format='multipart',
        HTTP_AUTHORIZATION=f'JWT {token}')
    settings.USER_FILE_SIZE_LIMIT = old_limit
    assert response.status_code == HTTP_413_REQUEST_ENTITY_TOO_LARGE
    assert response.json()['error'] == 'ERROR_FILE_SIZE_TOO_LARGE'
    assert response.json()['detail'] == (
        'The provided file is too large. Max 0MB is allowed.')

    response = api_client.post(reverse('api:user_files:upload_file'),
                               data={'file': 'not a file'},
                               format='multipart',
                               HTTP_AUTHORIZATION=f'JWT {token}')
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()['error'] == 'ERROR_INVALID_FILE'

    storage = FileSystemStorage(location=str(tmpdir),
                                base_url='http://localhost')

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        with freeze_time('2020-01-01 12:00'):
            file = SimpleUploadedFile('test.txt', b'Hello World')
            response = api_client.post(reverse('api:user_files:upload_file'),
                                       data={'file': file},
                                       format='multipart',
                                       HTTP_AUTHORIZATION=f'JWT {token}')

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['size'] == 11
    assert response_json['mime_type'] == 'text/plain'
    assert response_json['is_image'] is False
    assert response_json['image_width'] is None
    assert response_json['image_height'] is None
    assert response_json['uploaded_at'] == '2020-01-01T12:00:00Z'
    assert response_json['thumbnails'] is None
    assert response_json['original_name'] == 'test.txt'
    assert 'localhost:8000' in response_json['url']

    user_file = UserFile.objects.all().last()
    assert user_file.name == response_json['name']
    assert response_json['url'].endswith(response_json['name'])
    file_path = tmpdir.join('user_files', user_file.name)
    assert file_path.isfile()

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        file = SimpleUploadedFile('test.txt', b'Hello World')
        response_2 = api_client.post(reverse('api:user_files:upload_file'),
                                     data={'file': file},
                                     format='multipart',
                                     HTTP_AUTHORIZATION=f'JWT {token}')

    # The old file should be provided.
    assert response_2.json()['name'] == response_json['name']
    assert response_json['original_name'] == 'test.txt'

    image = Image.new('RGB', (100, 140), color='red')
    file = SimpleUploadedFile('test.png', b'')
    image.save(file, format='PNG')
    file.seek(0)

    with patch('baserow.core.user_files.handler.default_storage', new=storage):
        response = api_client.post(reverse('api:user_files:upload_file'),
                                   data={'file': file},
                                   format='multipart',
                                   HTTP_AUTHORIZATION=f'JWT {token}')

    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['mime_type'] == 'image/png'
    assert response_json['is_image'] is True
    assert response_json['image_width'] == 100
    assert response_json['image_height'] == 140
    assert len(response_json['thumbnails']) == 1
    assert 'localhost:8000' in response_json['thumbnails']['tiny']['url']
    assert 'tiny' in response_json['thumbnails']['tiny']['url']
    assert response_json['thumbnails']['tiny']['width'] == 21
    assert response_json['thumbnails']['tiny']['height'] == 21
    assert response_json['original_name'] == 'test.png'

    user_file = UserFile.objects.all().last()
    file_path = tmpdir.join('user_files', user_file.name)
    assert file_path.isfile()
    file_path = tmpdir.join('thumbnails', 'tiny', user_file.name)
    assert file_path.isfile()
    thumbnail = Image.open(file_path.open('rb'))
    assert thumbnail.height == 21
    assert thumbnail.width == 21
Example #21
class ImageUploadTestCase(TestCase):

    @classmethod
    def setUpClass(cls):
        cls.testing_server = ThreadedHTTPServer("", 8001)
        cls.testing_server.start()

    def setUp(self) -> None:
        imgfile = open(os.path.join(test_sample_storage, 'testimg.jpg'), 'rb')
        self.testimg = SimpleUploadedFile(imgfile.name, imgfile.read(), content_type="image/jpeg")
        self.url = 'http://127.0.0.1:8001/urlupload.jpg'
        self.not_img_url = 'http://127.0.0.1:8001/menya_nelzya_zagrujat.txt'

    @classmethod
    def tearDownClass(cls):
        cls.testing_server.stop()
        shutil.rmtree(test_img_storage_form)

    @override_settings(MEDIA_ROOT=test_img_storage_form)
    def test_both_field_fill(self):
        resp = self.client.post(reverse('img_upload'), {'url': self.url, 'photo': self.testimg})
        self.assertFormError(resp, 'form', None, 'fill in one form field at a time')

    @override_settings(MEDIA_ROOT=test_img_storage_form)
    def test_rawfile_upload(self):
        resp = self.client.post(reverse('img_upload'), {'photo': self.testimg})
        saved_img = Image.objects.get(id=1)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(saved_img.img_hash, "ec0356b6ba7b427a92c68d274cc6e444")

    @override_settings(MEDIA_ROOT=test_img_storage_form)
    def test_duplicate_upload(self):
        resp = self.client.post(reverse('img_upload'), {'photo': self.testimg})
        self.assertEqual(resp.status_code, 302)
        self.testimg.seek(0)
        resp = self.client.post(reverse('img_upload'), {'photo': self.testimg})
        self.assertFormError(resp, 'form', None, 'The image is already in the database')

    @override_settings(MEDIA_ROOT=test_img_storage_form)
    def test_url_upload(self):
        resp = self.client.post(reverse('img_upload'), {'url': self.url})
        saved_img = Image.objects.get(id=1)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(saved_img.img_hash, "e4718b5266bb4c931d006c16a8e624cf")

    @override_settings(MEDIA_ROOT=test_img_storage_form)
    def test_url_not_picture(self):
        resp = self.client.post(reverse('img_upload'), {'url': self.not_img_url})
        self.assertFormError(resp, 'form', None, "Can't download file")

    @override_settings(MEDIA_ROOT=test_img_storage_form,
                       MAX_UPLOAD_SIZE=20)
    def test_rawfile_max_filesize(self):
        resp = self.client.post(reverse('img_upload'), {'photo': self.testimg})
        self.assertFormError(resp, 'form', 'photo', "File size greater than 1.9073486328125e-05 mb")

    @override_settings(MEDIA_ROOT=test_img_storage_form,
                       MAX_UPLOAD_SIZE=20)
    def test_url_max_filesize(self):
        resp = self.client.post(reverse('img_upload'), {'url': self.url})
        self.assertFormError(resp, 'form', 'url', "File size greater than 1.9073486328125e-05 mb")