def test_endpoint_s3(self) -> None:
    """Exercise /json/export/realm with the S3 backend: the request must
    enqueue a deferred_work event, and (when the exporter is mocked)
    do_export_realm must receive the expected keyword arguments."""
    create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET,
                      settings.S3_AVATAR_BUCKET)

    # The endpoint should hand the export off to the queue worker.
    with patch('zerver.views.public_export.queue_json_publish') as mock_publish:
        result = self.client_post('/json/export/realm')
        queue_data = mock_publish.call_args_list[0][0]
        worker = mock_publish.call_args_list[0][0][0]
        self.assert_json_success(result)
        mock_publish.assert_called_once()
        event = queue_data[1]
        self.assertEqual(worker, 'deferred_work')
        self.assertEqual(event['realm_id'], 1)
        self.assertEqual(event['user_profile_id'], 5)
        self.assertEqual(event['type'], 'realm_exported')

    # Hit the endpoint again with the exporter mocked out and inspect
    # the keyword arguments it was invoked with.
    with patch('zerver.lib.export.do_export_realm') as mock_export:
        result = self.client_post('/json/export/realm')
        args = mock_export.call_args_list[0][1]
        # TODO: Clean up the way we do the mocking here; we will
        # want to mock do_export_realm in a way that captures its
        # arguments but doesn't lead to (silent) error spam from
        # do_write_stats_file_for_realm_export.
        #
        # Probably setting a `side_effect` makes sense?
        self.assert_json_success(result)
        self.assertEqual(args['realm'], self.admin.realm)
        self.assertEqual(args['public_only'], True)
        self.assertEqual(args['output_dir'].startswith('/tmp/zulip-export-'), True)
        self.assertEqual(args['threads'], 6)
def test_export_files_from_s3(self) -> None:
    """Exporting a realm backed by S3 should write out attachments,
    emoji, and avatars along with their record entries."""
    create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    realm = Realm.objects.get(string_id='zulip')
    attachment_path_id, emoji_path, original_avatar_path_id, test_image = self._setup_export_files()
    full_data = self._export_realm(realm)

    data = full_data['attachment']
    self.assertEqual(len(data['zerver_attachment']), 1)
    record = data['zerver_attachment'][0]
    self.assertEqual(record['path_id'], attachment_path_id)

    def check_variable_type(user_profile_id: int, realm_id: int) -> None:
        # Exported ids must be genuine ints, not stringified.
        self.assertEqual(type(user_profile_id), int)
        self.assertEqual(type(realm_id), int)

    # Test uploads
    fields = attachment_path_id.split('/')
    fn = os.path.join(full_data['uploads_dir'],
                      os.path.join(fields[0], fields[1], fields[2]))
    with open(fn, 'r') as f:
        self.assertEqual(f.read(), 'zulip!')
    records = full_data['uploads_dir_records']
    self.assertEqual(records[0]['path'],
                     os.path.join(fields[0], fields[1], fields[2]))
    self.assertEqual(records[0]['s3_path'], attachment_path_id)
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])

    # Test emojis
    fn = os.path.join(full_data['emoji_dir'], emoji_path)
    fn = fn.replace('1.png', '')
    self.assertIn('1.png', os.listdir(fn))
    records = full_data['emoji_dir_records']
    self.assertEqual(records[0]['file_name'], '1.png')
    self.assertTrue('last_modified' in records[0])
    self.assertEqual(records[0]['path'], '2/emoji/images/1.png')
    self.assertEqual(records[0]['s3_path'], '2/emoji/images/1.png')
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])

    # Test avatars
    fn = os.path.join(full_data['avatar_dir'], original_avatar_path_id)
    with open(fn, 'rb') as file:
        fn_data = file.read()
    self.assertEqual(fn_data, test_image)
    records = full_data['avatar_dir_records']
    record_path = [record['path'] for record in records]
    record_s3_path = [record['s3_path'] for record in records]
    self.assertIn(original_avatar_path_id, record_path)
    self.assertIn(original_avatar_path_id, record_s3_path)
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])
def test_export_files_from_s3(self) -> None:
    """Exporting a realm backed by S3 should write out attachments,
    emoji, and avatars along with their record entries.

    Fix: the avatar file is read via a context manager; the previous
    bare ``open(fn, 'rb').read()`` leaked the file handle.
    """
    create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    realm = Realm.objects.get(string_id='zulip')
    attachment_path_id, emoji_path, original_avatar_path_id, test_image = self._setup_export_files()
    full_data = self._export_realm(realm)

    data = full_data['attachment']
    self.assertEqual(len(data['zerver_attachment']), 1)
    record = data['zerver_attachment'][0]
    self.assertEqual(record['path_id'], attachment_path_id)

    def check_variable_type(user_profile_id: int, realm_id: int) -> None:
        # Exported ids must be genuine ints, not stringified.
        self.assertEqual(type(user_profile_id), int)
        self.assertEqual(type(realm_id), int)

    # Test uploads
    fields = attachment_path_id.split('/')
    fn = os.path.join(full_data['uploads_dir'],
                      os.path.join(fields[0], fields[1], fields[2]))
    with open(fn) as f:
        self.assertEqual(f.read(), 'zulip!')
    records = full_data['uploads_dir_records']
    self.assertEqual(records[0]['path'],
                     os.path.join(fields[0], fields[1], fields[2]))
    self.assertEqual(records[0]['s3_path'], attachment_path_id)
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])

    # Test emojis
    fn = os.path.join(full_data['emoji_dir'], emoji_path)
    fn = fn.replace('1.png', '')
    self.assertIn('1.png', os.listdir(fn))
    records = full_data['emoji_dir_records']
    self.assertEqual(records[0]['file_name'], '1.png')
    self.assertTrue('last_modified' in records[0])
    self.assertEqual(records[0]['path'], '1/emoji/images/1.png')
    self.assertEqual(records[0]['s3_path'], '1/emoji/images/1.png')
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])

    # Test avatars
    fn = os.path.join(full_data['avatar_dir'], original_avatar_path_id)
    # Context manager instead of bare open().read() (was a handle leak).
    with open(fn, 'rb') as f:
        fn_data = f.read()
    self.assertEqual(fn_data, test_image)
    records = full_data['avatar_dir_records']
    record_path = [record['path'] for record in records]
    record_s3_path = [record['s3_path'] for record in records]
    self.assertIn(original_avatar_path_id, record_path)
    self.assertIn(original_avatar_path_id, record_s3_path)
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])
def test_transfer_emoji_to_s3(self) -> None:
    """transfer_emoji_to_s3 should upload both the original and the
    resized image for each realm emoji."""
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    othello = self.example_user('othello')
    RealmEmoji.objects.all().delete()
    emoji_name = "emoji.png"
    with get_test_image_file("img.png") as image_file:
        emoji = check_add_realm_emoji(othello.realm, emoji_name, othello, image_file)
        if not emoji:
            raise AssertionError("Unable to add emoji.")
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=othello.realm_id,
            emoji_file_name=emoji.file_name,
        )
    with self.assertLogs(level="INFO"):
        transfer_emoji_to_s3(1)
    self.assertEqual(len(list(bucket.objects.all())), 2)
    original_key = bucket.Object(emoji_path + ".original")
    resized_key = bucket.Object(emoji_path)
    # Re-read the source image to compare against what landed in S3.
    with get_test_image_file("img.png") as image_file:
        image_data = image_file.read()
    resized_image_data = resize_emoji(image_data)
    self.assertEqual(image_data, original_key.get()['Body'].read())
    self.assertEqual(resized_image_data, resized_key.get()['Body'].read())
def test_transfer_emoji_to_s3(self) -> None:
    """transfer_emoji_to_s3 should upload both the original and the
    resized image for each realm emoji.

    Fix: the test image is opened via a context manager; previously the
    handle returned by get_test_image_file() was never closed.
    """
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    othello = self.example_user('othello')
    RealmEmoji.objects.all().delete()
    emoji_name = "emoji.png"
    with get_test_image_file("img.png") as image_file:
        emoji = check_add_realm_emoji(othello.realm, emoji_name, othello, image_file)
        if not emoji:
            raise AssertionError("Unable to add emoji.")
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=othello.realm_id,
            emoji_file_name=emoji.file_name,
        )
        transfer_emoji_to_s3(1)
        self.assertEqual(len(bucket.get_all_keys()), 2)
        original_key = bucket.get_key(emoji_path + ".original")
        resized_key = bucket.get_key(emoji_path)
        # Rewind and re-read the source image for comparison.
        image_file.seek(0)
        image_data = image_file.read()
    resized_image_data = resize_emoji(image_data)
    self.assertEqual(image_data, original_key.get_contents_as_string())
    self.assertEqual(resized_image_data, resized_key.get_contents_as_string())
def test_transfer_emoji_to_s3(self) -> None:
    """transfer_emoji_to_s3 should upload both the original and the
    resized image for each realm emoji.

    Fix: the test image is opened via a context manager; previously the
    handle returned by get_test_image_file() was never closed.
    """
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    othello = self.example_user('othello')
    RealmEmoji.objects.all().delete()
    emoji_name = "emoji.png"
    with get_test_image_file("img.png") as image_file:
        emoji = check_add_realm_emoji(othello.realm, emoji_name, othello, image_file)
        if not emoji:
            raise AssertionError("Unable to add emoji.")
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=othello.realm_id,
            emoji_file_name=emoji.file_name,
        )
        transfer_emoji_to_s3(1)
        self.assertEqual(len(bucket.get_all_keys()), 2)
        original_key = bucket.get_key(emoji_path + ".original")
        resized_key = bucket.get_key(emoji_path)
        # Rewind and re-read the source image for comparison.
        image_file.seek(0)
        image_data = image_file.read()
    resized_image_data = resize_emoji(image_data)
    self.assertEqual(image_data, original_key.get_contents_as_string())
    self.assertEqual(resized_image_data, resized_key.get_contents_as_string())
def test_transfer_avatars_to_s3(self) -> None:
    """transfer_avatars_to_s3 should upload the standard, original, and
    medium avatar images for each user.

    Fix: the on-disk avatar files are read via context managers; the
    previous three bare ``open(...).read()`` calls leaked file handles.
    """
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    self.login('hamlet')
    with get_test_image_file('img.png') as image_file:
        self.client_post("/json/users/me/avatar", {'file': image_file})
    user = self.example_user("hamlet")
    with self.assertLogs(level="INFO"):
        transfer_avatars_to_s3(1)
    path_id = user_avatar_path(user)
    image_key = bucket.Object(path_id)
    original_image_key = bucket.Object(path_id + ".original")
    medium_image_key = bucket.Object(path_id + "-medium.png")
    self.assertEqual(len(list(bucket.objects.all())), 3)
    # Compare each S3 object against its on-disk counterpart, closing
    # the local files deterministically.
    with open(avatar_disk_path(user), "rb") as f:
        self.assertEqual(image_key.get()['Body'].read(), f.read())
    with open(avatar_disk_path(user, original=True), "rb") as f:
        self.assertEqual(original_image_key.get()['Body'].read(), f.read())
    with open(avatar_disk_path(user, medium=True), "rb") as f:
        self.assertEqual(medium_image_key.get()['Body'].read(), f.read())
def test_transfer_avatars_to_s3(self) -> None:
    """transfer_avatars_to_s3 should upload the standard, original, and
    medium avatar images for each user.

    Fix: the on-disk avatar files are read via context managers; the
    previous three bare ``open(...).read()`` calls leaked file handles.
    """
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    self.login('hamlet')
    with get_test_image_file('img.png') as image_file:
        self.client_post("/json/users/me/avatar", {'file': image_file})
    user = self.example_user("hamlet")
    transfer_avatars_to_s3(1)
    path_id = user_avatar_path(user)
    image_key = bucket.get_key(path_id)
    original_image_key = bucket.get_key(path_id + ".original")
    medium_image_key = bucket.get_key(path_id + "-medium.png")
    self.assertEqual(len(bucket.get_all_keys()), 3)
    # Compare each S3 key against its on-disk counterpart, closing
    # the local files deterministically.
    with open(avatar_disk_path(user), "rb") as f:
        self.assertEqual(image_key.get_contents_as_string(), f.read())
    with open(avatar_disk_path(user, original=True), "rb") as f:
        self.assertEqual(original_image_key.get_contents_as_string(), f.read())
    with open(avatar_disk_path(user, medium=True), "rb") as f:
        self.assertEqual(medium_image_key.get_contents_as_string(), f.read())
def test_endpoint_s3(self) -> None:
    """POSTing to /json/export/realm with the S3 backend should upload
    the export tarball and record its URL in RealmAuditLog."""
    admin = self.example_user('iago')
    self.login(admin.email)
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    tarball_path = create_tarball_path()
    with patch('zerver.lib.export.do_export_realm',
               return_value=tarball_path) as mock_export:
        with self.settings(LOCAL_UPLOADS_DIR=None):
            result = self.client_post('/json/export/realm')
        self.assert_json_success(result)
        # The local tarball is deleted once it has been uploaded.
        self.assertFalse(os.path.exists(tarball_path))
        args = mock_export.call_args_list[0][1]
        self.assertEqual(args['realm'], admin.realm)
        self.assertEqual(args['public_only'], True)
        self.assertIn('/tmp/zulip-export-', args['output_dir'])
        self.assertEqual(args['threads'], 6)
    # The audit log should point at the hosted tarball.
    export_object = RealmAuditLog.objects.filter(
        event_type='realm_exported').first()
    uri = export_object.extra_data
    self.assertIsNotNone(uri)
    path_id = re.sub('https://test-avatar-bucket.s3.amazonaws.com:443/', '', uri)
    self.assertEqual(bucket.get_key(path_id).get_contents_as_string(), b'zulip!')
def test_endpoint_s3(self) -> None:
    """End-to-end test of the realm export endpoint with the S3 backend:
    export, hosting, listing, and deletion of the tarball.

    Fix: assert that the audit-log entry exists before dereferencing it,
    so a missing entry fails with a clear assertion rather than an
    AttributeError on None.
    """
    admin = self.example_user('iago')
    self.login_user(admin)
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    tarball_path = create_dummy_file('test-export.tar.gz')

    # Test the export logic.
    with patch('zerver.lib.export.do_export_realm',
               return_value=tarball_path) as mock_export:
        with self.settings(LOCAL_UPLOADS_DIR=None), stdout_suppressed():
            result = self.client_post('/json/export/realm')
        self.assert_json_success(result)
        self.assertFalse(os.path.exists(tarball_path))
        args = mock_export.call_args_list[0][1]
        self.assertEqual(args['realm'], admin.realm)
        self.assertEqual(args['public_only'], True)
        self.assertIn('/tmp/zulip-export-', args['output_dir'])
        self.assertEqual(args['threads'], 6)

    # Get the entry and test that iago initiated it.
    audit_log_entry = RealmAuditLog.objects.filter(
        event_type=RealmAuditLog.REALM_EXPORTED).first()
    assert audit_log_entry is not None  # fail loudly if the log entry is missing
    self.assertEqual(audit_log_entry.acting_user_id, admin.id)

    # Test that the file is hosted, and the contents are as expected.
    path_id = ujson.loads(audit_log_entry.extra_data).get('export_path')
    self.assertIsNotNone(path_id)
    self.assertEqual(bucket.Object(path_id).get()['Body'].read(), b'zulip!')
    result = self.client_get('/json/export/realm')
    self.assert_json_success(result)

    # Test that the export we have is the export we created.
    export_dict = result.json()['exports']
    self.assertEqual(export_dict[0]['id'], audit_log_entry.id)
    self.assertEqual(export_dict[0]['export_url'],
                     'https://test-avatar-bucket.s3.amazonaws.com' + path_id)
    self.assertEqual(export_dict[0]['acting_user_id'], admin.id)
    self.assert_length(export_dict, RealmAuditLog.objects.filter(
        realm=admin.realm, event_type=RealmAuditLog.REALM_EXPORTED).count())

    # Finally, delete the file.
    result = self.client_delete(f'/json/export/realm/{audit_log_entry.id}')
    self.assert_json_success(result)
    with self.assertRaises(botocore.exceptions.ClientError):
        bucket.Object(path_id).load()

    # Try to delete an export with a `deleted_timestamp` key.
    audit_log_entry.refresh_from_db()
    export_data = ujson.loads(audit_log_entry.extra_data)
    self.assertIn('deleted_timestamp', export_data)
    result = self.client_delete(f'/json/export/realm/{audit_log_entry.id}')
    self.assert_json_error(result, "Export already deleted")

    # Now try to delete a non-existent export.
    result = self.client_delete('/json/export/realm/0')
    self.assert_json_error(result, "Invalid data export ID")
def test_is_static_or_current_realm_url_with_s3(self) -> None:
    """URLs generated by the S3 upload backend for a realm's own assets
    (icon, logo, avatar, emoji) must be classified as static/current-realm."""
    create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    realm = self.example_user("hamlet").realm

    def test(url: str) -> bool:
        return is_static_or_current_realm_url(url, realm)

    upload_backend = zerver.lib.upload.upload_backend
    self.assertTrue(test(upload_backend.get_realm_icon_url(realm.id, version=1)))
    self.assertTrue(test(upload_backend.get_realm_logo_url(realm.id, version=1, night=False)))
    self.assertTrue(test(upload_backend.get_avatar_url("deadbeefcafe")))
    self.assertTrue(test(upload_backend.get_emoji_url("emoji.gif", realm.id)))
def test_transfer_message_files(self) -> None:
    """Uploaded message attachments should be copied into the S3 uploads
    bucket by transfer_message_files_to_s3."""
    bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
    hamlet = self.example_user('hamlet')
    othello = self.example_user('othello')
    # Seed two local uploads owned by different users.
    upload_message_file('dummy1.txt', len(b'zulip1!'), 'text/plain', b'zulip1!', hamlet)
    upload_message_file('dummy2.txt', len(b'zulip2!'), 'text/plain', b'zulip2!', othello)
    transfer_message_files_to_s3(1)
    attachments = Attachment.objects.all()
    self.assertEqual(len(bucket.get_all_keys()), 2)
    self.assertEqual(bucket.get_key(attachments[0].path_id).get_contents_as_string(), b'zulip1!')
    self.assertEqual(bucket.get_key(attachments[1].path_id).get_contents_as_string(), b'zulip2!')
def test_transfer_message_files(self) -> None:
    """Uploaded message attachments should be copied into the S3 uploads
    bucket by transfer_message_files_to_s3.

    Fix: dropped the redundant Python 2 ``u''`` literal prefixes; all
    string literals are unicode in Python 3.
    """
    bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
    hamlet = self.example_user('hamlet')
    othello = self.example_user('othello')
    # Seed two local uploads owned by different users.
    upload_message_file('dummy1.txt', len(b'zulip1!'), 'text/plain', b'zulip1!', hamlet)
    upload_message_file('dummy2.txt', len(b'zulip2!'), 'text/plain', b'zulip2!', othello)
    transfer_message_files_to_s3(1)
    attachments = Attachment.objects.all()
    self.assertEqual(len(bucket.get_all_keys()), 2)
    self.assertEqual(bucket.get_key(attachments[0].path_id).get_contents_as_string(), b'zulip1!')
    self.assertEqual(bucket.get_key(attachments[1].path_id).get_contents_as_string(), b'zulip2!')
def test_transfer_message_files(self) -> None:
    """Uploaded message attachments should be copied into the S3 uploads
    bucket by transfer_message_files_to_s3."""
    bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
    hamlet = self.example_user("hamlet")
    othello = self.example_user("othello")
    # Seed two local uploads owned by different users.
    upload_message_file("dummy1.txt", len(b"zulip1!"), "text/plain", b"zulip1!", hamlet)
    upload_message_file("dummy2.txt", len(b"zulip2!"), "text/plain", b"zulip2!", othello)
    with self.assertLogs(level="INFO"):
        transfer_message_files_to_s3(1)
    attachments = Attachment.objects.all().order_by("id")
    self.assert_length(list(bucket.objects.all()), 2)
    self.assertEqual(bucket.Object(attachments[0].path_id).get()["Body"].read(), b"zulip1!")
    self.assertEqual(bucket.Object(attachments[1].path_id).get()["Body"].read(), b"zulip2!")
def test_endpoint_s3(self) -> None:
    """Realm export endpoint on S3: the tarball is uploaded, listed via
    the GET endpoint, and removable via delete_export_tarball."""
    admin = self.example_user('iago')
    self.login(admin.email)
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    tarball_path = create_dummy_file('test-export.tar.gz')
    with patch('zerver.lib.export.do_export_realm',
               return_value=tarball_path) as mock_export:
        with self.settings(LOCAL_UPLOADS_DIR=None):
            result = self.client_post('/json/export/realm')
        self.assert_json_success(result)
        # The local tarball is deleted once it has been uploaded.
        self.assertFalse(os.path.exists(tarball_path))
        args = mock_export.call_args_list[0][1]
        self.assertEqual(args['realm'], admin.realm)
        self.assertEqual(args['public_only'], True)
        self.assertIn('/tmp/zulip-export-', args['output_dir'])
        self.assertEqual(args['threads'], 6)
    # The audit log entry should record where the tarball is hosted.
    export_object = RealmAuditLog.objects.filter(
        event_type='realm_exported').first()
    path_id = ujson.loads(getattr(export_object, 'extra_data')).get('export_path')
    self.assertIsNotNone(path_id)
    self.assertEqual(bucket.get_key(path_id).get_contents_as_string(), b'zulip!')
    result = self.client_get('/json/export/realm')
    self.assert_json_success(result)
    export_dict = result.json()['exports']
    self.assertEqual(export_dict[0]['extra_data'].get('export_path'), path_id)
    self.assertEqual(export_dict[0]['acting_user_id'], admin.id)
    self.assert_length(export_dict, RealmAuditLog.objects.filter(
        realm=admin.realm, event_type=RealmAuditLog.REALM_EXPORTED).count())
    # Deleting the tarball should remove the S3 key.
    result = zerver.lib.upload.upload_backend.delete_export_tarball(path_id)
    self.assertEqual(result, path_id)
    self.assertIsNone(bucket.get_key(path_id))
def test_import_files_from_s3(self) -> None:
    """Round-trip: export a realm with uploads/emoji/avatars, import it
    under a new string_id, and verify the files landed in S3.

    Fix: the handle returned by get_test_image_file() is now closed via
    a context manager; previously it leaked.
    """
    uploads_bucket, avatar_bucket = create_s3_buckets(
        settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    realm = Realm.objects.get(string_id='zulip')
    self._setup_export_files()
    self._export_realm(realm)
    with patch('logging.info'):
        do_import_realm(os.path.join(settings.TEST_WORKER_DIR, 'test-export'),
                        'test-zulip')
    imported_realm = Realm.objects.get(string_id='test-zulip')
    # Close both the fixture handle and the re-opened binary handle.
    with get_test_image_file('img.png') as test_image_handle:
        with open(test_image_handle.name, 'rb') as f:
            test_image_data = f.read()

    # Test attachments
    uploaded_file = Attachment.objects.get(realm=imported_realm)
    self.assertEqual(len(b'zulip!'), uploaded_file.size)
    attachment_content = uploads_bucket.get_key(
        uploaded_file.path_id).get_contents_as_string()
    self.assertEqual(b"zulip!", attachment_content)

    # Test emojis
    realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
    emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=imported_realm.id,
        emoji_file_name=realm_emoji.file_name,
    )
    emoji_key = avatar_bucket.get_key(emoji_path)
    self.assertIsNotNone(emoji_key)
    self.assertEqual(emoji_key.key, emoji_path)

    # Test avatars
    user_email = Message.objects.all()[0].sender.email
    user_profile = UserProfile.objects.get(email=user_email, realm=imported_realm)
    avatar_path_id = user_avatar_path(user_profile) + ".original"
    original_image_key = avatar_bucket.get_key(avatar_path_id)
    self.assertEqual(original_image_key.key, avatar_path_id)
    image_data = original_image_key.get_contents_as_string()
    self.assertEqual(image_data, test_image_data)
def test_transfer_avatars_to_s3(self) -> None:
    """transfer_avatars_to_s3 should upload the standard, original, and
    medium avatar images for each user.

    Fix: the on-disk avatar files are read via context managers; the
    previous three bare ``open(...).read()`` calls leaked file handles.
    """
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    self.login(self.example_email("hamlet"))
    with get_test_image_file('img.png') as image_file:
        self.client_post("/json/users/me/avatar", {'file': image_file})
    user = self.example_user("hamlet")
    transfer_avatars_to_s3(1)
    path_id = user_avatar_path(user)
    image_key = bucket.get_key(path_id)
    original_image_key = bucket.get_key(path_id + ".original")
    medium_image_key = bucket.get_key(path_id + "-medium.png")
    self.assertEqual(len(bucket.get_all_keys()), 3)
    # Compare each S3 key against its on-disk counterpart, closing the
    # local files deterministically.
    with open(avatar_disk_path(user), "rb") as f:
        self.assertEqual(image_key.get_contents_as_string(), f.read())
    with open(avatar_disk_path(user, original=True), "rb") as f:
        self.assertEqual(original_image_key.get_contents_as_string(), f.read())
    with open(avatar_disk_path(user, medium=True), "rb") as f:
        self.assertEqual(medium_image_key.get_contents_as_string(), f.read())
def test_transfer_message_files(self) -> None:
    """Uploaded message attachments should be copied into the S3 uploads
    bucket by transfer_message_files_to_s3."""
    bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
    hamlet = self.example_user('hamlet')
    othello = self.example_user('othello')
    # Seed two local uploads owned by different users.
    upload_message_file('dummy1.txt', len(b'zulip1!'), 'text/plain', b'zulip1!', hamlet)
    upload_message_file('dummy2.txt', len(b'zulip2!'), 'text/plain', b'zulip2!', othello)
    with self.assertLogs(level="INFO"):
        transfer_message_files_to_s3(1)
    attachments = Attachment.objects.all().order_by("id")
    self.assertEqual(len(list(bucket.objects.all())), 2)
    self.assertEqual(bucket.Object(attachments[0].path_id).get()['Body'].read(), b'zulip1!')
    self.assertEqual(bucket.Object(attachments[1].path_id).get()['Body'].read(), b'zulip2!')
def test_import_files_from_s3(self) -> None:
    """Round-trip: export a realm with uploads/emoji/avatars, import it
    under a new string_id, and verify the files landed in S3.

    Fix: both the get_test_image_file() handle and the re-opened binary
    handle are now closed via context managers; previously the bare
    ``open(...).read()`` leaked both.
    """
    uploads_bucket, avatar_bucket = create_s3_buckets(
        settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    realm = Realm.objects.get(string_id='zulip')
    self._setup_export_files()
    self._export_realm(realm)
    with patch('logging.info'):
        do_import_realm('var/test-export', 'test-zulip')
    imported_realm = Realm.objects.get(string_id='test-zulip')
    with get_test_image_file('img.png') as test_image_handle:
        with open(test_image_handle.name, 'rb') as f:
            test_image_data = f.read()

    # Test attachments
    uploaded_file = Attachment.objects.get(realm=imported_realm)
    self.assertEqual(len(b'zulip!'), uploaded_file.size)
    attachment_content = uploads_bucket.get_key(
        uploaded_file.path_id).get_contents_as_string()
    self.assertEqual(b"zulip!", attachment_content)

    # Test emojis
    realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
    emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=imported_realm.id,
        emoji_file_name=realm_emoji.file_name,
    )
    emoji_key = avatar_bucket.get_key(emoji_path)
    self.assertIsNotNone(emoji_key)
    self.assertEqual(emoji_key.key, emoji_path)

    # Test avatars
    user_email = Message.objects.all()[0].sender.email
    user_profile = UserProfile.objects.get(email=user_email, realm=imported_realm)
    avatar_path_id = user_avatar_path(user_profile) + ".original"
    original_image_key = avatar_bucket.get_key(avatar_path_id)
    self.assertEqual(original_image_key.key, avatar_path_id)
    image_data = original_image_key.get_contents_as_string()
    self.assertEqual(image_data, test_image_data)
def test_endpoint_s3(self) -> None:
    """End-to-end test of the realm export endpoint with the S3 backend:
    export, hosting, listing, and deletion of the tarball."""
    admin = self.example_user("iago")
    self.login_user(admin)
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    tarball_path = create_dummy_file("test-export.tar.gz")

    # Test the export logic.
    with patch("zerver.lib.export.do_export_realm",
               return_value=tarball_path) as mock_export:
        with self.settings(LOCAL_UPLOADS_DIR=None), stdout_suppressed(), \
                self.assertLogs(level="INFO") as info_logs:
            result = self.client_post("/json/export/realm")
        self.assertTrue("INFO:root:Completed data export for zulip in " in info_logs.output[0])
        self.assert_json_success(result)
        # The local tarball is deleted once it has been uploaded.
        self.assertFalse(os.path.exists(tarball_path))
        args = mock_export.call_args_list[0][1]
        self.assertEqual(args["realm"], admin.realm)
        self.assertEqual(args["public_only"], True)
        self.assertIn("/tmp/zulip-export-", args["output_dir"])
        self.assertEqual(args["threads"], 6)

    # Get the entry and test that iago initiated it.
    audit_log_entry = RealmAuditLog.objects.filter(
        event_type=RealmAuditLog.REALM_EXPORTED).first()
    assert audit_log_entry is not None
    self.assertEqual(audit_log_entry.acting_user_id, admin.id)

    # Test that the file is hosted, and the contents are as expected.
    extra_data = audit_log_entry.extra_data
    assert extra_data is not None
    export_path = orjson.loads(extra_data)["export_path"]
    assert export_path.startswith("/")
    path_id = export_path[1:]
    self.assertEqual(bucket.Object(path_id).get()["Body"].read(), b"zulip!")

    result = self.client_get("/json/export/realm")
    self.assert_json_success(result)

    # Test that the export we have is the export we created.
    export_dict = result.json()["exports"]
    self.assertEqual(export_dict[0]["id"], audit_log_entry.id)
    self.assertEqual(
        export_dict[0]["export_url"],
        "https://test-avatar-bucket.s3.amazonaws.com" + export_path,
    )
    self.assertEqual(export_dict[0]["acting_user_id"], admin.id)
    self.assert_length(
        export_dict,
        RealmAuditLog.objects.filter(
            realm=admin.realm, event_type=RealmAuditLog.REALM_EXPORTED).count(),
    )

    # Finally, delete the file.
    result = self.client_delete(f"/json/export/realm/{audit_log_entry.id}")
    self.assert_json_success(result)
    with self.assertRaises(botocore.exceptions.ClientError):
        bucket.Object(path_id).load()

    # Try to delete an export with a `deleted_timestamp` key.
    audit_log_entry.refresh_from_db()
    extra_data = audit_log_entry.extra_data
    assert extra_data is not None
    export_data = orjson.loads(extra_data)
    self.assertIn("deleted_timestamp", export_data)
    result = self.client_delete(f"/json/export/realm/{audit_log_entry.id}")
    self.assert_json_error(result, "Export already deleted")

    # Now try to delete a non-existent export.
    result = self.client_delete("/json/export/realm/0")
    self.assert_json_error(result, "Invalid data export ID")
def test_s3_source_type(self) -> None:
    """Thumbor thumbnailing of S3-hosted uploads and custom emoji:
    redirects, URL structure, API auth, and access control.

    Fix: the emoji test image is opened via a context manager; the
    handle returned by get_test_image_file() was previously leaked.
    """
    def get_file_path_urlpart(uri: str, size: str = '') -> str:
        # Mirror the URL fragment thumbor produces for the s3 source type.
        url_in_result = 'smart/filters:no_upscale()%s/%s/source_type/s3'
        sharpen_filter = ''
        if size:
            url_in_result = '/%s/%s' % (size, url_in_result)
            sharpen_filter = ':sharpen(0.5,0.2,true)'
        hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8')
        return url_in_result % (sharpen_filter, hex_uri)

    create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    self.login(self.example_email("hamlet"))
    fp = StringIO("zulip!")
    fp.name = "zulip.jpeg"
    result = self.client_post("/json/user_uploads", {'file': fp})
    self.assert_json_success(result)
    json = ujson.loads(result.content)
    self.assertIn("uri", json)
    uri = json["uri"]
    base = '/user_uploads/'
    self.assertEqual(base, uri[:len(base)])
    quoted_uri = urllib.parse.quote(uri[1:], safe='')

    # Test full size image.
    result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri)
    self.assertIn(expected_part_url, result.url)

    # Test thumbnail size.
    result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri, '0x300')
    self.assertIn(expected_part_url, result.url)

    # Test custom emoji urls in Zulip messages.
    user_profile = self.example_user("hamlet")
    file_name = "emoji.png"
    # Context manager so the image handle is closed (was a leak).
    with get_test_image_file("img.png") as image_file:
        upload_emoji_image(image_file, file_name, user_profile)
    custom_emoji_url = upload_backend.get_emoji_url(file_name, user_profile.realm_id)
    emoji_url_base = '/user_avatars/'
    self.assertEqual(emoji_url_base, custom_emoji_url[:len(emoji_url_base)])
    quoted_emoji_url = urllib.parse.quote(custom_emoji_url[1:], safe='')

    # Test full size custom emoji image (for emoji link in messages case).
    result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url))
    self.assertEqual(result.status_code, 302, result)
    self.assertIn(custom_emoji_url, result.url)

    # Tests the /api/v1/thumbnail api endpoint with standard API auth
    self.logout()
    result = self.api_get(self.example_email("hamlet"),
                          '/thumbnail?url=%s&size=full' % (quoted_uri,))
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri)
    self.assertIn(expected_part_url, result.url)

    # Test with another user trying to access image using thumbor.
    self.login(self.example_email("iago"))
    result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
    self.assertEqual(result.status_code, 403, result)
    self.assert_in_response("You are not authorized to view this file.", result)
def test_s3_source_type(self) -> None:
    """Thumbor thumbnailing of S3-hosted uploads and custom emoji:
    redirects, URL structure, API auth, and access control.

    Fix: the emoji test image is opened via a context manager; the
    handle returned by get_test_image_file() was previously leaked.
    """
    def get_file_path_urlpart(uri: str, size: str='') -> str:
        # Mirror the URL fragment thumbor produces for the s3 source type.
        url_in_result = 'smart/filters:no_upscale()%s/%s/source_type/s3'
        sharpen_filter = ''
        if size:
            url_in_result = '/%s/%s' % (size, url_in_result)
            sharpen_filter = ':sharpen(0.5,0.2,true)'
        hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8')
        return url_in_result % (sharpen_filter, hex_uri)

    create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    self.login(self.example_email("hamlet"))
    fp = StringIO("zulip!")
    fp.name = "zulip.jpeg"
    result = self.client_post("/json/user_uploads", {'file': fp})
    self.assert_json_success(result)
    json = ujson.loads(result.content)
    self.assertIn("uri", json)
    uri = json["uri"]
    base = '/user_uploads/'
    self.assertEqual(base, uri[:len(base)])
    quoted_uri = urllib.parse.quote(uri[1:], safe='')

    # Test full size image.
    result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri)
    self.assertIn(expected_part_url, result.url)

    # Test thumbnail size.
    result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri, '0x300')
    self.assertIn(expected_part_url, result.url)

    # Test custom emoji urls in Zulip messages.
    user_profile = self.example_user("hamlet")
    file_name = "emoji.png"
    # Context manager so the image handle is closed (was a leak).
    with get_test_image_file("img.png") as image_file:
        upload_emoji_image(image_file, file_name, user_profile)
    custom_emoji_url = upload_backend.get_emoji_url(file_name, user_profile.realm_id)
    emoji_url_base = '/user_avatars/'
    self.assertEqual(emoji_url_base, custom_emoji_url[:len(emoji_url_base)])
    quoted_emoji_url = urllib.parse.quote(custom_emoji_url[1:], safe='')

    # Test full size custom emoji image (for emoji link in messages case).
    result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url))
    self.assertEqual(result.status_code, 302, result)
    self.assertIn(custom_emoji_url, result.url)

    # Tests the /api/v1/thumbnail api endpoint with standard API auth
    self.logout()
    result = self.api_get(
        self.example_email("hamlet"),
        '/thumbnail?url=%s&size=full' % (quoted_uri,))
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri)
    self.assertIn(expected_part_url, result.url)

    # Test with another user trying to access image using thumbor.
    self.login(self.example_email("iago"))
    result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
    self.assertEqual(result.status_code, 403, result)
    self.assert_in_response("You are not authorized to view this file.", result)
def test_s3_source_type(self) -> None:
    """Thumbor thumbnailing of S3-hosted uploads and custom emoji:
    redirects, URL structure, API auth, and access control."""
    def get_file_path_urlpart(uri: str, size: str = "") -> str:
        # Mirror the URL fragment thumbor produces for the s3 source type.
        url_in_result = "smart/filters:no_upscale()%s/%s/source_type/s3"
        sharpen_filter = ""
        if size:
            url_in_result = f"/{size}/{url_in_result}"
            sharpen_filter = ":sharpen(0.5,0.2,true)"
        hex_uri = base64.urlsafe_b64encode(uri.encode()).decode("utf-8")
        return url_in_result % (sharpen_filter, hex_uri)

    create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    fp = StringIO("zulip!")
    fp.name = "zulip.jpeg"
    result = self.client_post("/json/user_uploads", {"file": fp})
    self.assert_json_success(result)
    json = orjson.loads(result.content)
    self.assertIn("uri", json)
    uri = json["uri"]
    base = "/user_uploads/"
    self.assertEqual(base, uri[:len(base)])

    # Test full size image.
    result = self.client_get("/thumbnail", {"url": uri[1:], "size": "full"})
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri)
    self.assertIn(expected_part_url, result.url)

    # Test thumbnail size.
    result = self.client_get("/thumbnail", {"url": uri[1:], "size": "thumbnail"})
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri, "0x300")
    self.assertIn(expected_part_url, result.url)

    # Test custom emoji URLs in Zulip messages.
    user_profile = self.example_user("hamlet")
    file_name = "emoji.png"
    with get_test_image_file("img.png") as image_file:
        upload_emoji_image(image_file, file_name, user_profile)
    custom_emoji_url = upload_backend.get_emoji_url(file_name, user_profile.realm_id)
    emoji_url_base = "/user_avatars/"
    self.assertEqual(emoji_url_base, custom_emoji_url[:len(emoji_url_base)])

    # Test full size custom emoji image (for emoji link in messages case).
    result = self.client_get("/thumbnail", {"url": custom_emoji_url[1:], "size": "full"})
    self.assertEqual(result.status_code, 302, result)
    self.assertIn(custom_emoji_url, result.url)

    # Tests the /api/v1/thumbnail API endpoint with standard API auth
    self.logout()
    result = self.api_get(hamlet, "/thumbnail", {"url": uri[1:], "size": "full"})
    self.assertEqual(result.status_code, 302, result)
    expected_part_url = get_file_path_urlpart(uri)
    self.assertIn(expected_part_url, result.url)

    # Test with another user trying to access image using thumbor.
    self.login("iago")
    result = self.client_get("/thumbnail", {"url": uri[1:], "size": "full"})
    self.assertEqual(result.status_code, 403, result)
    self.assert_in_response("You are not authorized to view this file.", result)
def test_transfer_emoji_to_s3(self) -> None:
    """transfer_emoji_to_s3 should upload original/resized images for
    static emoji and additionally a still frame for animated emoji."""
    bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
    othello = self.example_user("othello")
    RealmEmoji.objects.all().delete()

    # Static emoji: expect the original plus the resized image.
    emoji_name = "emoji.png"
    with get_test_image_file("img.png") as image_file:
        emoji = check_add_realm_emoji(othello.realm, emoji_name, othello, image_file)
    if not emoji:
        raise AssertionError("Unable to add emoji.")
    emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=othello.realm_id,
        emoji_file_name=emoji.file_name,
    )
    with self.assertLogs(level="INFO"):
        transfer_emoji_to_s3(1)
    self.assert_length(list(bucket.objects.all()), 2)
    original_key = bucket.Object(emoji_path + ".original")
    resized_key = bucket.Object(emoji_path)
    image_data = read_test_image_file("img.png")
    resized_image_data, is_animated, still_image_data = resize_emoji(image_data)
    self.assertEqual(is_animated, False)
    self.assertEqual(still_image_data, None)
    self.assertEqual(image_data, original_key.get()["Body"].read())
    self.assertEqual(resized_image_data, resized_key.get()["Body"].read())

    # Animated emoji: the transfer should also produce a "still" frame.
    emoji_name = "emoji2.png"
    with get_test_image_file("animated_img.gif") as image_file:
        emoji = check_add_realm_emoji(othello.realm, emoji_name, othello, image_file)
    if not emoji:
        raise AssertionError("Unable to add emoji.")
    emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=othello.realm_id,
        emoji_file_name=emoji.file_name,
    )
    with self.assertLogs(level="INFO"):
        transfer_emoji_to_s3(1)
    self.assert_length(list(bucket.objects.all()), 5)
    original_key = bucket.Object(emoji_path + ".original")
    resized_key = bucket.Object(emoji_path)
    assert emoji.file_name
    still_key = bucket.Object(
        RealmEmoji.STILL_PATH_ID_TEMPLATE.format(
            realm_id=othello.realm_id,
            emoji_filename_without_extension=os.path.splitext(emoji.file_name)[0],
        )
    )
    image_data = read_test_image_file("animated_img.gif")
    resized_image_data, is_animated, still_image_data = resize_emoji(image_data)
    self.assertEqual(is_animated, True)
    self.assertEqual(type(still_image_data), bytes)
    self.assertEqual(image_data, original_key.get()["Body"].read())
    self.assertEqual(resized_image_data, resized_key.get()["Body"].read())
    self.assertEqual(still_image_data, still_key.get()["Body"].read())