def test_update_sync_status(file_errors, site_errors, status):
    """update_sync_status should update the website sync_status field as expected"""
    sync_time = now_in_utc()
    website = WebsiteFactory.create(
        synced_on=sync_time,
        sync_status=WebsiteSyncStatus.PROCESSING,
        sync_errors=site_errors,
    )
    for file_error in file_errors:
        # A resource is only attached when the drive file has no sync error
        if not file_error:
            linked_resource = WebsiteContentFactory.create(
                type=CONTENT_TYPE_RESOURCE, website=website
            )
        else:
            linked_resource = None
        DriveFileFactory.create(
            website=website,
            sync_error=file_error,
            sync_dt=sync_time,
            resource=linked_resource,
            status=(
                DriveFileStatus.COMPLETE
                if file_error is None
                else DriveFileStatus.FAILED
            ),
        )
    # This drive file is synced after the cutoff time and should be excluded
    DriveFileFactory.create(
        website=website,
        sync_dt=now_in_utc() + timedelta(seconds=10),
        resource=WebsiteContentFactory.create(
            type=CONTENT_TYPE_RESOURCE, website=website
        ),
    )
    update_sync_status(website, sync_time)
    website.refresh_from_db()
    assert website.sync_status == status
    expected_errors = [err for err in file_errors if err] + (site_errors or [])
    assert sorted(website.sync_errors) == sorted(expected_errors)
def permission_groups(): """Set up groups, users and websites for permission testing""" ( global_admin, global_author, site_owner, site_admin, site_editor, ) = UserFactory.create_batch(5) websites = WebsiteFactory.create_batch(2, owner=site_owner) global_admin.groups.add(Group.objects.get(name=constants.GLOBAL_ADMIN)) global_author.groups.add(Group.objects.get(name=constants.GLOBAL_AUTHOR)) site_admin.groups.add(websites[0].admin_group) site_editor.groups.add(websites[0].editor_group) website = websites[0] owner_content = WebsiteContentFactory.create(website=website, owner=website.owner) editor_content = WebsiteContentFactory.create(website=website, owner=site_editor) yield SimpleNamespace( global_admin=global_admin, global_author=global_author, site_admin=site_admin, site_editor=site_editor, websites=websites, owner_content=owner_content, editor_content=editor_content, )
def test_hugo_menu_yaml_serialize(omnibus_config):
    """HugoMenuYamlFileSerializer.serialize should create the expected file contents"""
    nav_menu_config_item = omnibus_config.find_item_by_name("navmenu")
    assert nav_menu_config_item is not None
    # Create page object referred to in the menu data
    WebsiteContentFactory.create(
        text_id=EXAMPLE_UUIDS[0],
        is_page_content=True,
        dirpath="path/to",
        filename="myfile",
    )
    example_menu_data = get_example_menu_data()
    menu_content = WebsiteContentFactory.build(
        is_page_content=False,
        type=nav_menu_config_item.name,
        metadata={"mainmenu": example_menu_data},
    )
    serialized = HugoMenuYamlFileSerializer(omnibus_config).serialize(menu_content)
    parsed = yaml.load(serialized, Loader=yaml.SafeLoader)
    # The first entry references the page above, so it should gain a url
    assert parsed == {
        "mainmenu": [
            {**example_menu_data[0], "url": "/path/to/myfile"},
            example_menu_data[1],
        ]
    }
def test_unassigned_youtube_ids(mocker, is_ocw):
    """videos_with_unassigned_youtube_ids should return WebsiteContent objects for videos with no youtube ids"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    # Videos that already have a youtube id should never be returned
    WebsiteContentFactory.create_batch(
        4,
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_VIDEO,
            "video_metadata": {"youtube_id": "abc123"},
        },
    )
    # Missing key, None, and empty string all count as "unassigned"
    videos_without_ids = [
        WebsiteContentFactory.create(
            website=website,
            metadata={
                "resourcetype": RESOURCE_TYPE_VIDEO,
                "video_metadata": video_metadata,
            },
        )
        for video_metadata in ({}, {"youtube_id": None}, {"youtube_id": ""})
    ]
    # Non-video resources are excluded even if they lack a youtube id
    WebsiteContentFactory.create(
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_IMAGE,
            "video_metadata": {"youtube_id": "bad_data"},
        },
    )
    unassigned_content = videos_with_unassigned_youtube_ids(website)
    if is_ocw:
        assert len(unassigned_content) == 3
        for content in videos_without_ids:
            assert content in unassigned_content
    else:
        assert len(unassigned_content) == 0
def test_hugo_file_serialize(markdown, exp_sections):
    """HugoMarkdownFileSerializer.serialize should create the expected file contents"""
    metadata = {"metadata1": "dummy value 1", "metadata2": "dummy value 2"}
    content = WebsiteContentFactory.create(
        text_id="abcdefg",
        title="Content Title",
        type="sometype",
        markdown=markdown,
        metadata=metadata,
    )
    site_config = SiteConfig(content.website.starter.config)
    file_content = HugoMarkdownFileSerializer(site_config).serialize(
        website_content=content
    )
    delimiter = re.compile(r"^---\n", re.MULTILINE)
    # re.split returns a blank string as the first item here even though the
    # file contents begin with the given pattern, so filter out empty parts.
    sections = [part for part in re.split(delimiter, file_content) if part]
    assert len(sections) == exp_sections
    front_matter_lines = list(filter(None, sorted(sections[0].split("\n"))))
    expected_lines = [
        f"title: {content.title}",
        f"content_type: {content.type}",
        f"uid: {content.text_id}",
    ] + [f"{k}: {v}" for k, v in metadata.items()]
    assert front_matter_lines == sorted(expected_lines)
    if exp_sections > 1:
        assert sections[1] == markdown
def test_websites_content_edit_with_upload(mocker, drf_client, global_admin_user, file_upload):
    """Uploading a file when editing a new WebsiteContent object should work"""
    mime_type = "text/doof"
    mocker.patch("websites.serializers.detect_mime_type", return_value=mime_type)
    drf_client.force_login(global_admin_user)
    content = WebsiteContentFactory.create(
        type=constants.CONTENT_TYPE_RESOURCE, metadata={"title": "test"}
    )
    payload = {"file": file_upload, "title": "New Title"}
    detail_url = reverse(
        "websites_content_api-detail",
        kwargs={
            "parent_lookup_website": content.website.name,
            "text_id": str(content.text_id),
        },
    )
    response = drf_client.patch(detail_url, data=payload, format="multipart")
    assert response.status_code == 200
    content = WebsiteContent.objects.get(id=content.id)
    assert content.title == payload["title"]
    # The uploaded file is stored under the site, keyed by the dashless text_id
    expected_name = (
        f"sites/{content.website.name}/"
        f"{content.text_id.replace('-', '')}_{file_upload.name}"
    )
    assert content.file.name == expected_name
    assert content.metadata["file_type"] == mime_type
    assert response.data["text_id"] == str(content.text_id)
def test_get_destination_url(is_page_content, dirpath, filename, expected):
    """get_destination_url should create a url for a piece of content"""
    content = WebsiteContentFactory.create(
        is_page_content=is_page_content, dirpath=dirpath, filename=filename
    )
    site_config = SiteConfig(content.website.starter.config)
    assert get_destination_url(content, site_config) == expected
def test_videos_with_truncatable_text(mocker, is_ocw):
    """Videos with titles or descriptions that are too long should be returned"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    # (title, description): first has an over-long title, second an over-long
    # description, third is fine and should not be returned
    title_desc_pairs = (
        (" ".join(["TooLongTitle" for _ in range(10)]), "desc"),
        ("title", " ".join(["TooLongDescription" for _ in range(500)])),
        ("title", "desc"),
    )
    video_resources = [
        WebsiteContentFactory.create(
            website=website,
            title=video_title,
            metadata={
                "description": video_desc,
                "resourcetype": RESOURCE_TYPE_VIDEO,
                "video_files": {"video_captions_file": "abc123"},
            },
        )
        for video_title, video_desc in title_desc_pairs
    ]
    truncatable_content = videos_with_truncatable_text(website)
    assert len(video_resources[1].metadata["description"]) > 5000
    if is_ocw:
        assert len(truncatable_content) == 2
        for content in video_resources[0:2]:
            assert content in truncatable_content
    else:
        assert truncatable_content == []
def test_website_content_unpublished():
    """Website should set has_unpublished_live and has_unpublished_draft if any related content is updated"""
    website = WebsiteFactory.create()
    content = WebsiteContentFactory.create(website=website)
    website.has_unpublished_live = False
    website.has_unpublished_draft = False
    website.save()
    # Saving content that belongs to some other website must not affect ours
    unrelated_content = WebsiteContentFactory.create()
    unrelated_content.save()
    website.refresh_from_db()
    # website should not have changed since the content is for a different website
    assert website.has_unpublished_live is False
    assert website.has_unpublished_draft is False
    # Saving our own content flips both unpublished flags
    content.save()
    website.refresh_from_db()
    assert website.has_unpublished_live is True
    assert website.has_unpublished_draft is True
def test_website_content_detail_with_file_serializer():
    """WebsiteContentDetailSerializer should include its file url in metadata"""
    content = WebsiteContentFactory.create(
        type="resource", metadata={"title": "Test"}
    )
    content.file = SimpleUploadedFile("test.txt", b"content")
    serialized = WebsiteContentDetailSerializer(instance=content).data
    assert serialized["image"] == content.file.url
    assert serialized["metadata"]["title"] == content.metadata["title"]
def test_create_content_sync_state(mocker):
    """
    Test that the create_content_sync_state signal makes the correct call
    """
    mock_api = mocker.patch("content_sync.signals.api", autospec=True)
    content = WebsiteContentFactory.create()
    # Creation triggers one upsert; a subsequent save triggers another
    mock_api.upsert_content_sync_state.assert_called_once_with(content)
    content.save()
    assert mock_api.upsert_content_sync_state.call_count == 2
    mock_api.upsert_content_sync_state.assert_has_calls(
        [mocker.call(content), mocker.call(content)]
    )
def test_update_video(settings, mocker, youtube_mocker, privacy):
    """update_video should send the correct data in a request to update youtube metadata"""
    speakers = "speaker1, speaker2"
    tags = "tag1, tag2"
    youtube_id = "test video description"
    title = "TitleLngt>"
    description = "DescLngth>"
    content = WebsiteContentFactory.create(
        title=" ".join([title for _ in range(11)]),
        metadata={
            "resourcetype": RESOURCE_TYPE_VIDEO,
            "description": " ".join([description for _ in range(501)]),
            "video_metadata": {
                "youtube_id": youtube_id,
                "video_tags": tags,
                "video_speakers": speakers,
            },
        },
    )
    # Truncated values: ">" is stripped and an ellipsis appended
    expected_title = f'{" ".join([title.replace(">", "") for _ in range(9)])}...'
    expected_desc = f'{" ".join([description.replace(">", "") for _ in range(499)])}...'
    # Sanity-check the fixture: originals exceed the limits, truncations fit
    assert len(content.title) > YT_MAX_LENGTH_TITLE
    assert len(content.metadata["description"]) > YT_MAX_LENGTH_DESCRIPTION
    assert len(expected_title) <= YT_MAX_LENGTH_TITLE
    assert len(expected_desc) <= YT_MAX_LENGTH_DESCRIPTION
    mock_update_caption = mocker.patch("videos.youtube.YouTubeApi.update_captions")
    YouTubeApi().update_video(content, privacy=privacy)
    youtube_mocker().videos.return_value.update.assert_any_call(
        part="snippet",
        body={
            "id": youtube_id,
            "snippet": {
                "title": expected_title,
                "description": expected_desc,
                "tags": tags,
                "categoryId": settings.YT_CATEGORY_ID,
            },
        },
    )
    if privacy is not None:
        youtube_mocker().videos.return_value.update.assert_any_call(
            part="status",
            body={
                "id": youtube_id,
                "status": {"privacyStatus": privacy, "embeddable": True},
            },
        )
    mock_update_caption.assert_called_once_with(content, youtube_id)
def test_sync_unsynced_websites(api_mock, backend_exists, create_backend, delete):
    """
    Test that sync_all_content_to_backend is run on all websites needing a sync
    """
    api_mock.get_sync_backend.return_value.backend_exists.return_value = backend_exists
    # A fully-synced website that the task should skip entirely
    website_synced = WebsiteFactory.create(
        has_unpublished_live=False,
        has_unpublished_draft=False,
        live_publish_status=PUBLISH_STATUS_SUCCEEDED,
        draft_publish_status=PUBLISH_STATUS_SUCCEEDED,
        latest_build_id_live=1,
        latest_build_id_draft=2,
    )
    unsynced_sites = WebsiteFactory.create_batch(2)
    with mute_signals(post_save):
        # Matching checksums mean the synced site has nothing outstanding
        ContentSyncStateFactory.create(
            current_checksum="a1",
            synced_checksum="a1",
            content=WebsiteContentFactory.create(website=website_synced),
        )
        ContentSyncStateFactory.create_batch(
            2, content=WebsiteContentFactory.create(website=unsynced_sites[0])
        )
        ContentSyncStateFactory.create_batch(
            2, content=WebsiteContentFactory.create(website=unsynced_sites[1])
        )
    tasks.sync_unsynced_websites.delay(create_backends=create_backend, delete=delete)
    for site in unsynced_sites:
        api_mock.get_sync_backend.assert_any_call(site)
        site.refresh_from_db()
        # The task resets publish state on every site it syncs
        assert site.has_unpublished_live is True
        assert site.has_unpublished_draft is True
        assert site.live_publish_status is None
        assert site.draft_publish_status is None
        assert site.latest_build_id_live is None
        assert site.latest_build_id_draft is None
    with pytest.raises(AssertionError):
        api_mock.get_sync_backend.assert_any_call(website_synced)
    sync_backend = api_mock.get_sync_backend.return_value
    expected_syncs = 2 if (create_backend or backend_exists) else 0
    assert sync_backend.sync_all_content_to_backend.call_count == expected_syncs
    expected_deletes = 2 if delete and (create_backend or backend_exists) else 0
    assert (
        sync_backend.delete_orphaned_content_in_backend.call_count
        == expected_deletes
    )
def test_videos_missing_captions(mocker, is_ocw):
    """videos_missing_captions should return WebsiteContent objects for videos with no captions"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    # Videos that already have captions should never be returned
    WebsiteContentFactory.create_batch(
        3,
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_VIDEO,
            "video_files": {"video_captions_file": "abc123"},
        },
    )
    # Both None and empty string count as "missing captions"
    videos_without_captions = [
        WebsiteContentFactory.create(
            website=website,
            metadata={
                "resourcetype": RESOURCE_TYPE_VIDEO,
                "video_files": {"video_captions_file": captions},
            },
        )
        for captions in (None, "")
    ]
    # Non-video resources are excluded regardless of caption data
    WebsiteContentFactory.create(
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_IMAGE,
            "video_files": {"video_captions_file": "bad_data"},
        },
    )
    unassigned_content = videos_missing_captions(website)
    if is_ocw:
        assert len(unassigned_content) == 2
        for content in videos_without_captions:
            assert content in unassigned_content
    else:
        assert len(unassigned_content) == 0
def test_website_content_detail_serializer():
    """WebsiteContentDetailSerializer should serialize all relevant fields to the frontend"""
    content = WebsiteContentFactory.create()
    serialized = WebsiteContentDetailSerializer(instance=content).data
    # updated_on is rendered in ISO format with a "Z" suffix replacing the offset
    expected_updated_on = content.updated_on.isoformat()[:-6] + "Z"
    assert serialized["text_id"] == str(content.text_id)
    assert serialized["title"] == content.title
    assert serialized["type"] == content.type
    assert serialized["updated_on"] == expected_updated_on
    assert serialized["markdown"] == content.markdown
    assert serialized["metadata"] == content.metadata
def test_website_content_serializer():
    """WebsiteContentSerializer should serialize a few fields to identify the content"""
    content = WebsiteContentFactory.create()
    serialized = WebsiteContentSerializer(instance=content).data
    # updated_on is rendered in ISO format with a "Z" suffix replacing the offset
    expected_updated_on = content.updated_on.isoformat()[:-6] + "Z"
    assert serialized["text_id"] == str(content.text_id)
    assert serialized["title"] == content.title
    assert serialized["type"] == content.type
    assert serialized["updated_on"] == expected_updated_on
    # The listing serializer intentionally omits the heavyweight fields
    assert "markdown" not in serialized
    assert "metadata" not in serialized
def test_start_transcript_job(mocker, settings, video_resource):
    """test start_transcript_job"""
    youtube_id = "test"
    threeplay_file_id = 1
    settings.YT_FIELD_ID = "youtube_id"
    video_file = VideoFileFactory.create(
        status=VideoStatus.CREATED,
        destination=DESTINATION_YOUTUBE,
        destination_id=youtube_id,
    )
    video = video_file.video
    video.source_key = "the/file"
    video.save()
    mock_threeplay_upload_video_request = mocker.patch(
        "videos.tasks.threeplay_api.threeplay_upload_video_request",
        return_value={"data": {"id": threeplay_file_id}},
    )
    mock_order_transcript_request_request = mocker.patch(
        "videos.tasks.threeplay_api.threeplay_order_transcript_request"
    )
    # With a matching resource, its title is used; otherwise the source
    # filename ("file" from "the/file") is used instead.
    if video_resource:
        title = "title"
        WebsiteContentFactory.create(
            website=video.website, metadata={"youtube_id": youtube_id}, title=title
        )
    else:
        title = "file"
    start_transcript_job(video.id)
    mock_threeplay_upload_video_request.assert_called_once_with(
        video.website.short_id, youtube_id, title
    )
    mock_order_transcript_request_request.assert_called_once_with(
        video.id, threeplay_file_id
    )
def test_create_gdrive_resource_content_update(mock_get_s3_content_type):
    """create_resource_from_gdrive should update a WebsiteContent object linked to a DriveFile object"""
    content = WebsiteContentFactory.create(file="test/path/old.doc")
    drive_file = DriveFileFactory.create(
        website=content.website, s3_key="test/path/word.docx", resource=content
    )
    # Precondition: the resource still points at the stale file path
    assert content.file != drive_file.s3_key
    create_gdrive_resource_content(drive_file)
    content.refresh_from_db()
    drive_file.refresh_from_db()
    # The resource file is updated to the drive file's s3 key and stays linked
    assert content.file == drive_file.s3_key
    assert drive_file.resource == content
def test_website_content_detail_serializer_save(mocker):
    """WebsiteContentDetailSerializer should modify only certain fields"""
    mock_update_website_backend = mocker.patch(
        "websites.serializers.update_website_backend"
    )
    mock_create_website_pipeline = mocker.patch(
        "websites.serializers.create_website_publishing_pipeline"
    )
    content = WebsiteContentFactory.create(
        type=CONTENT_TYPE_RESOURCE,
        metadata={
            "to_keep": "old value 1",
            "to_update": "old value 2",
        },
    )
    existing_text_id = content.text_id
    new_title = f"{content.title} with some more text"
    new_type = f"{content.type}_other"
    new_markdown = "hopefully different from the previous markdown"
    metadata_patch = {"to_update": "updated value 2", "created": "brand new!"}
    user = UserFactory.create()
    # uuid value is invalid but it's ignored since it's marked readonly
    serializer = WebsiteContentDetailSerializer(
        data={
            "title": new_title,
            "text_id": "----",
            "type": new_type,
            "markdown": new_markdown,
            "metadata": metadata_patch,
        },
        instance=content,
        context={
            "view": mocker.Mock(kwargs={"parent_lookup_website": content.website.name}),
            "request": mocker.Mock(user=user),
        },
    )
    serializer.is_valid(raise_exception=True)
    serializer.save()
    content.refresh_from_db()
    assert content.title == new_title
    # Read-only fields are untouched: text_id keeps its value, type is not changed
    assert content.text_id == existing_text_id
    assert content.type != new_type
    assert content.markdown == new_markdown
    # Metadata is merged, not replaced wholesale
    assert content.metadata == {
        "to_keep": "old value 1",
        "to_update": "updated value 2",
        "created": "brand new!",
    }
    assert content.updated_by == user
    mock_update_website_backend.assert_called_once_with(content.website)
    mock_create_website_pipeline.assert_not_called()
def test_upsert_content_files(mocker, mock_api_wrapper, db_data):
    """upsert_content_files should upsert all content files for each distinct user in one commit per user"""
    expected_num_users = 2
    # Create a record and delete it to test that upsert_content_files_for_user
    # still queries for deleted records
    content_to_delete = WebsiteContentFactory.create(website=db_data.website)
    content_to_delete.delete()
    patched_upsert_for_user = mocker.patch.object(
        mock_api_wrapper, "upsert_content_files_for_user"
    )
    mock_api_wrapper.upsert_content_files()
    # One call per user, plus one with None for the deleted/ownerless content
    assert patched_upsert_for_user.call_count == (expected_num_users + 1)
    for user in db_data.users:
        patched_upsert_for_user.assert_any_call(user.id)
    patched_upsert_for_user.assert_any_call(None)
def youtube_website(mocker): """Return a website with youtube resources""" website = WebsiteFactory.create() WebsiteContentFactory.create( type=CONTENT_TYPE_RESOURCE, metadata={ "resourcetype": RESOURCE_TYPE_IMAGE, "video_metadata": { "youtube_id": "fakeid" }, }, ) for youtube_id in ["", None, "abc123", "def456"]: WebsiteContentFactory.create( website=website, type=CONTENT_TYPE_RESOURCE, metadata={ "resourcetype": RESOURCE_TYPE_VIDEO, "video_metadata": { "youtube_id": youtube_id }, }, ) return website
def test_upsert_content_sync_state_create():
    """
    Verify that upsert_content_sync_state creates a ContentSyncState record for the content
    """
    # Mute signals so no sync state is created automatically on save
    with mute_signals(post_save):
        content = WebsiteContentFactory.create(markdown="abc")
    assert getattr(content, "content_sync_state", None) is None
    api.upsert_content_sync_state(content)
    content.refresh_from_db()
    abc_checksum = content.calculate_checksum()
    assert content.content_sync_state is not None
    # Newly created state has the current checksum but has never been synced
    assert content.content_sync_state.synced_checksum is None
    assert content.content_sync_state.current_checksum == abc_checksum
def test_create_gdrive_resource_content_error(mocker):
    """create_resource_from_gdrive should log an exception, update status if something goes wrong"""
    # Bug fix: use side_effect (not return_value) so the patched function
    # actually *raises* the exception; return_value=Exception(...) would only
    # return the exception object, never triggering the error-handling path.
    mocker.patch(
        "gdrive_sync.api.get_s3_content_type",
        side_effect=Exception("Could not determine resource type"),
    )
    mock_log = mocker.patch("gdrive_sync.api.log.exception")
    content = WebsiteContentFactory.create()
    drive_file = DriveFileFactory.create(
        website=content.website, s3_key="test/path/word.docx", resource=content
    )
    create_gdrive_resource_content(drive_file)
    content.refresh_from_db()
    drive_file.refresh_from_db()
    # The failure must be recorded on the drive file and logged once
    assert drive_file.status == DriveFileStatus.FAILED
    mock_log.assert_called_once_with(
        "Error creating resource for drive file %s", drive_file.file_id
    )
def test_data_file_serialize(serializer_cls):
    """JsonFileSerializer and YamlFileSerializer.serialize should create the expected data file contents"""
    metadata = {"metadata1": "dummy value 1", "metadata2": "dummy value 2"}
    content = WebsiteContentFactory.create(
        text_id="abcdefg",
        title="Content Title",
        type="sometype",
        metadata=metadata,
    )
    site_config = SiteConfig(content.website.starter.config)
    file_content = serializer_cls(site_config).serialize(website_content=content)
    # Parse with whichever format the serializer under test emits
    if serializer_cls == JsonFileSerializer:
        parsed_file_content = json.loads(file_content)
    else:
        parsed_file_content = yaml.load(file_content, Loader=yaml.SafeLoader)
    assert parsed_file_content == {**metadata, "title": "Content Title"}
def test_metadata_file_serialize():
    """JsonFileSerializer should create the expected data file contents for sitemetadata files"""
    metadata = {"metadata1": "dummy value 1", "metadata2": "dummy value 2"}
    content = WebsiteContentFactory.create(
        text_id="abcdefg",
        title="Content Title",
        type="sitemetadata",
        metadata=metadata,
    )
    site_config = SiteConfig(content.website.starter.config)
    serialized = JsonFileSerializer(site_config).serialize(website_content=content)
    # sitemetadata output additionally carries the website uuid as site_uid
    assert json.loads(serialized) == {
        **metadata,
        "site_uid": str(content.website.uuid),
        "title": "Content Title",
    }
def test_websites_content_detail(drf_client, global_admin_user, content_context):
    """The detail view for WebsiteContent should return serialized data"""
    drf_client.force_login(global_admin_user)
    content = WebsiteContentFactory.create(type="other")
    detail_url = reverse(
        "websites_content_api-detail",
        kwargs={
            "parent_lookup_website": content.website.name,
            "text_id": str(content.text_id),
        },
    )
    response = drf_client.get(f"{detail_url}?content_context={content_context}")
    expected = WebsiteContentDetailSerializer(
        instance=content, context={"content_context": content_context}
    ).data
    assert response.data == expected
def test_website_content_detail_serializer_youtube_ocw(settings, is_resource):
    """WebsiteContent serializers should conditionally fill in youtube thumbnail metadata"""
    settings.OCW_IMPORT_STARTER_SLUG = "course"
    starter = WebsiteStarter.objects.get(slug=settings.OCW_IMPORT_STARTER_SLUG)
    website = WebsiteFactory.create(starter=starter)
    youtube_id = "abc123"
    content_type = "resource" if is_resource else "page"
    existing_content = WebsiteContentFactory.create(
        type=content_type,
        website=website,
    )
    # Resources get video metadata with an empty thumbnail; pages get a body
    if is_resource:
        data = {
            "metadata": {
                "video_metadata": {"youtube_id": youtube_id},
                "video_files": {"video_thumbnail_file": ""},
            },
        }
    else:
        data = {"metadata": {"body": "text"}}
    existing_serializer = WebsiteContentDetailSerializer()
    existing_serializer.update(existing_content, data)
    data["type"] = content_type
    data["title"] = "new content"
    new_serializer = WebsiteContentCreateSerializer()
    new_serializer.context["website_id"] = website.uuid
    new_content = new_serializer.create(data)
    # Both the updated and the freshly-created content should behave the same
    for content in [existing_content, new_content]:
        if is_resource:
            assert content.metadata["video_metadata"]["youtube_id"] == youtube_id
            assert content.metadata["video_files"][
                "video_thumbnail_file"
            ] == YT_THUMBNAIL_IMG.format(video_id=youtube_id)
        else:
            assert content.metadata["body"] == "text"
def test_websites_content_delete(drf_client, permission_groups, mocker):
    """DELETEing a WebsiteContent should soft-delete the object"""
    update_website_backend_mock = mocker.patch(
        "websites.views.update_website_backend"
    )
    drf_client.force_login(permission_groups.global_admin)
    content = WebsiteContentFactory.create(
        updated_by=permission_groups.site_editor
    )
    response = drf_client.delete(
        reverse(
            "websites_content_api-detail",
            kwargs={
                "parent_lookup_website": content.website.name,
                "text_id": str(content.text_id),
            },
        )
    )
    assert response.data is None
    content.refresh_from_db()
    # Soft delete: the row survives with deleted set, attributed to the deleter
    assert content.updated_by == permission_groups.global_admin
    assert content.deleted is not None
    update_website_backend_mock.assert_called_once_with(content.website)
def test_website_content_detail_serializer_save_null_metadata(mocker):
    """WebsiteContentDetailSerializer should save if metadata is null"""
    mock_update_website_backend = mocker.patch(
        "websites.serializers.update_website_backend"
    )
    mock_create_website_pipeline = mocker.patch(
        "websites.serializers.create_website_publishing_pipeline"
    )
    content = WebsiteContentFactory.create(
        type=CONTENT_TYPE_RESOURCE,
        metadata=None,
    )
    existing_text_id = content.text_id
    new_markdown = "hopefully this saves without error"
    metadata_patch = {"meta": "data"}
    user = UserFactory.create()
    # uuid value is invalid but it's ignored since it's marked readonly
    serializer = WebsiteContentDetailSerializer(
        data={
            "text_id": "----",
            "markdown": new_markdown,
            "metadata": metadata_patch,
        },
        instance=content,
        context={
            "view": mocker.Mock(kwargs={"parent_lookup_website": content.website.name}),
            "request": mocker.Mock(user=user),
        },
    )
    serializer.is_valid(raise_exception=True)
    serializer.save()
    content.refresh_from_db()
    # The readonly text_id is preserved; metadata replaces the null value
    assert content.text_id == existing_text_id
    assert content.markdown == new_markdown
    assert content.metadata == {"meta": "data"}
    assert content.updated_by == user
    mock_update_website_backend.assert_called_once_with(content.website)
    mock_create_website_pipeline.assert_not_called()
def test_upsert_content_sync_state_update(settings):
    """
    Verify that upsert_content_sync_state updates a ContentSyncState record for the content
    """
    settings.CONTENT_SYNC_BACKEND = "content_sync.backends.SampleBackend"
    content = WebsiteContentFactory.create(markdown="abc")
    abc_checksum = content.calculate_checksum()
    # Start from a fully-synced state at the "abc" checksum
    sync_state = content.content_sync_state
    sync_state.current_checksum = abc_checksum
    sync_state.synced_checksum = abc_checksum
    sync_state.save()
    # Change the content; save with signals muted so nothing updates the
    # sync state automatically before the explicit upsert call below
    content.markdown = "def"
    def_checksum = content.calculate_checksum()
    with mute_signals(post_save):
        content.save()
    api.upsert_content_sync_state(content)
    content.content_sync_state.refresh_from_db()
    # current_checksum tracks the new content; synced_checksum is unchanged
    assert content.content_sync_state.synced_checksum == abc_checksum
    assert content.content_sync_state.current_checksum == def_checksum