def test_baseurl_replacer_replaces_content_in_same_course(
    website_uuid, should_markdown_change
):
    """
    Double check that if the dirpath + filename match multiple times, the
    content chosen is from the same course as the markdown being edited
    """
    markdown = R"""
    Kittens [meow]({{< baseurl >}}/resources/pets/cat) meow.
    """
    site_one = WebsiteFactory.build(uuid="website-uuid-111")
    site_two = WebsiteFactory.build(uuid="website-uuid-222")
    sites_by_uuid = {site.uuid: site for site in (site_one, site_two)}
    content_to_clean = WebsiteContentFactory.build(markdown=markdown, website=site_one)
    # candidate link target; may or may not live in the same course
    candidate = WebsiteContentFactory.build(
        website=sites_by_uuid[website_uuid],
        dirpath="content/resources/pets",
        filename="cat",
        text_id="uuid-111",
    )
    get_markdown_cleaner([candidate]).update_website_content(content_to_clean)
    assert (content_to_clean.markdown != markdown) == should_markdown_change
def test_publish_endpoint_list(settings, drf_client, version):
    """The WebsitePublishView endpoint should return the appropriate info for
    correctly filtered sites"""
    ocw_sites = WebsiteFactory.create_batch(
        2, source=constants.WEBSITE_SOURCE_OCW_IMPORT
    )
    draft_only_sites = WebsiteFactory.create_batch(
        2,
        source=constants.WEBSITE_SOURCE_STUDIO,
        draft_publish_status=constants.PUBLISH_STATUS_NOT_STARTED,
        live_publish_status=None,
    )
    live_only_sites = WebsiteFactory.create_batch(
        2,
        source=constants.WEBSITE_SOURCE_STUDIO,
        draft_publish_status=None,
        live_publish_status=constants.PUBLISH_STATUS_SUCCEEDED,
    )
    if version == VERSION_DRAFT:
        expected_sites = ocw_sites + draft_only_sites
    else:
        expected_sites = ocw_sites + live_only_sites
    settings.API_BEARER_TOKEN = "abc123"
    drf_client.credentials(HTTP_AUTHORIZATION=f"Bearer {settings.API_BEARER_TOKEN}")
    resp = drf_client.get(f'{reverse("publish_api-list")}?version={version}')
    assert resp.status_code == 200
    sites_by_name = {site["name"]: site for site in resp.data["sites"]}
    # ocw sites + the two matching-version studio sites
    assert len(sites_by_name) == 4
    for expected_site in expected_sites:
        returned = sites_by_name.get(expected_site.name)
        assert returned is not None
        assert returned["short_id"] == expected_site.short_id
def test_is_ocw_site(settings):
    """is_ocw_site() should return expected bool value for a website"""
    settings.OCW_IMPORT_STARTER_SLUG = "ocw-course"
    matching_site = WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(slug="ocw-course")
    )
    non_matching_site = WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(slug="not-ocw-course")
    )
    assert is_ocw_site(matching_site) is True
    assert is_ocw_site(non_matching_site) is False
def test_website_serializer(has_starter):
    """WebsiteSerializer should serialize a Website object with the correct fields"""
    if has_starter:
        website = WebsiteFactory.build()
    else:
        website = WebsiteFactory.build(starter=None)
    data = WebsiteSerializer(instance=website).data
    for field in ("name", "title", "metadata"):
        assert data[field] == getattr(website, field)
    # the lightweight serializer must not include the starter config
    assert "config" not in data
def test_fetch_website_not_found():
    """fetch_website should raise if a matching website was not found"""
    site_attrs = {
        "uuid": UUID(EXAMPLE_UUID_STR, version=4),
        "title": "my title",
        "name": "my name",
    }
    WebsiteFactory.create(**site_attrs)
    with pytest.raises(Website.DoesNotExist):
        fetch_website("bad values")
def test_url_site_relativiser(url, expected_index, expected_relative_url):
    """UrlSiteRelativiser should resolve a url to its site plus site-relative url."""
    websites = [
        WebsiteFactory.build(name="website_zero"),
        WebsiteFactory.build(name="website_one"),
    ]
    with patch_website_all(websites):
        relativiser = UrlSiteRelativiser()
        site, relative_url = relativiser(url)
        assert site == websites[expected_index]
        assert relative_url == expected_relative_url
def test_upsert_mass_publish_pipeline(
    settings, pipeline_settings, mocker, mock_auth, pipeline_exists, version
):  # pylint:disable=too-many-locals,too-many-arguments
    """The mass publish pipeline should have expected configuration"""
    hugo_projects_path = "https://github.com/org/repo"
    WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(
            source=STARTER_SOURCE_GITHUB, path=f"{hugo_projects_path}/site"
        ),
        name=settings.ROOT_WEBSITE_NAME,
    )
    instance_vars = f'?vars={quote(json.dumps({"version": version}))}'
    url_path = f"/api/v1/teams/{settings.CONCOURSE_TEAM}/pipelines/{BaseMassPublishPipeline.PIPELINE_NAME}/config{instance_vars}"
    # a missing pipeline raises on GET; an existing one returns a config version
    if pipeline_exists:
        get_patch_kwargs = {
            "return_value": ({}, {"X-Concourse-Config-Version": "3"})
        }
    else:
        get_patch_kwargs = {"side_effect": HTTPError()}
    mock_get = mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
        **get_patch_kwargs,
    )
    mock_put_headers = mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.put_with_headers"
    )
    MassPublishPipeline(version).upsert_pipeline()
    mock_get.assert_any_call(url_path)
    mock_put_headers.assert_any_call(
        url_path,
        data=mocker.ANY,
        headers=({"X-Concourse-Config-Version": "3"} if pipeline_exists else None),
    )
    _, kwargs = mock_put_headers.call_args_list[0]
    if version == VERSION_DRAFT:
        bucket = settings.AWS_PREVIEW_BUCKET_NAME
        api_url = settings.OCW_STUDIO_DRAFT_URL
    else:
        bucket = settings.AWS_PUBLISH_BUCKET_NAME
        api_url = settings.OCW_STUDIO_LIVE_URL
    config_str = json.dumps(kwargs)
    for expected_fragment in (
        settings.OCW_GTM_ACCOUNT_ID,
        bucket,
        version,
        f"{hugo_projects_path}.git",
        api_url,
    ):
        assert expected_fragment in config_str
def test_website_endpoint_search(drf_client):
    """should limit the queryset based on the search param"""
    superuser = UserFactory.create(is_superuser=True)
    drf_client.force_login(superuser)
    WebsiteFactory.create(title="Apple", name="Bacon", short_id="Cheese").save()
    WebsiteFactory.create(title="Xylophone", name="Yellow", short_id="Zebra").save()
    WebsiteFactory.create(
        title="U.S. Military Power",
        name="17-482-u-s-military-power-spring-2015",
        short_id="17.482-Spring-2015",
    ).save()
    WebsiteFactory.create(
        title="Biomedical Signal and Image Processing",
        name="hst-582j-biomedical-signal-and-image-processing-spring-2007",
        short_id="HST.582J-Spring-2007",
    ).save()
    # each group of search terms should match exactly one site, whether the
    # term hits the title, name, or short_id (case-insensitively)
    search_cases = [
        (["Apple", "Bacon", "Cheese"], ["Apple"]),
        (["Xylophone", "Yellow", "Zebra"], ["Xylophone"]),
        (["U.S. military", "17-482", "17.482"], ["U.S. Military Power"]),
        (
            ["signal and image", "HsT.582", "hSt-582"],
            ["Biomedical Signal and Image Processing"],
        ),
    ]
    for terms, expected_titles in search_cases:
        for term in terms:
            resp = drf_client.get(reverse("websites_api-list"), {"search": term})
            titles = [website["title"] for website in resp.data.get("results")]
            assert titles == expected_titles
def test_website_starter_unpublished():
    """Website should set has_unpublished_live and has_unpublished_draft if the starter is updated"""
    first_site = WebsiteFactory.create(
        has_unpublished_live=False, has_unpublished_draft=False
    )
    second_site = WebsiteFactory.create(
        has_unpublished_live=False,
        has_unpublished_draft=False,
        starter=first_site.starter,
    )
    # saving the shared starter should flag every site using it
    first_site.starter.save()
    for site in (first_site, second_site):
        site.refresh_from_db()
        assert site.has_unpublished_draft is True
        assert site.has_unpublished_live is True
def test_resolveuid_conversion_cross_site(markdown, expected):
    """Check shortcodes are used within same site."""
    content_being_cleaned = WebsiteContentFactory.build(
        markdown=markdown, website=WebsiteFactory.build()
    )
    # referenced content lives on a different site
    referenced_content = WebsiteContentFactory.build(
        text_id="5cf754b2-b97b-4ac1-8dab-deed1201de94",
        dirpath="content/pages/path/to",
        filename="thing",
        website=WebsiteFactory.build(name="other-site-name"),
    )
    cleaner = get_markdown_cleaner([content_being_cleaned, referenced_content])
    cleaner.update_website_content(content_being_cleaned)
    assert content_being_cleaned.markdown == expected
def test_trigger_publish_live(settings, mocker):
    """Verify publish_website calls the appropriate task"""
    settings.CONTENT_SYNC_BACKEND = "content_sync.backends.SampleBackend"
    patched_task = mocker.patch("content_sync.tasks.publish_website_backend_live")
    site = WebsiteFactory.create()
    api.trigger_publish(site.name, VERSION_LIVE)
    patched_task.delay.assert_called_once_with(site.name)
def test_setup_website_groups_permissions():
    """Permissions should be assigned as expected"""
    owner, admin_user, editor_user = UserFactory.create_batch(3)
    website = WebsiteFactory.create(owner=owner)
    # permissions should have all been added via signal
    assert setup_website_groups_permissions(website) == (0, 0, False)
    # deleting the admin group forces one group to be recreated
    website.admin_group.delete()
    assert setup_website_groups_permissions(website) == (1, 0, False)
    # removing a group perm forces one group to be updated
    remove_perm(constants.PERMISSION_VIEW, website.editor_group, website)
    assert setup_website_groups_permissions(website) == (0, 1, False)
    # removing the owner perm forces owner perms to be restored
    remove_perm(constants.PERMISSION_PUBLISH, website.owner, website)
    assert setup_website_groups_permissions(website) == (0, 0, True)
    admin_user.groups.add(website.admin_group)
    editor_user.groups.add(website.editor_group)
    for perm in constants.PERMISSIONS_EDITOR:
        assert editor_user.has_perm(perm, website) is True
    for perm in constants.PERMISSIONS_ADMIN:
        for privileged_user in (owner, admin_user):
            assert privileged_user.has_perm(perm, website) is True
    for perm in (constants.PERMISSION_PUBLISH, constants.PERMISSION_COLLABORATE):
        assert editor_user.has_perm(perm, website) is False
def test_update_youtube_metadata_no_videos(mocker):
    """Youtube API should not be instantiated if there are no videos"""
    for gate in ("videos.youtube.is_ocw_site", "videos.youtube.is_youtube_enabled"):
        mocker.patch(gate, return_value=True)
    patched_youtube_api = mocker.patch("videos.youtube.YouTubeApi")
    update_youtube_metadata(WebsiteFactory.create())
    patched_youtube_api.assert_not_called()
def test_baseurl_replacer_handle_specific_url_replacements(
    url, content_relative_dirpath, filename
):
    """
    Test specific replacements.

    This test could perhaps be dropped. It was written before ContentLookup
    was moved to a separate module, and the functionality is tested there,
    now, too.
    """
    website = WebsiteFactory.build(uuid="website-uuid")
    markdown = f"my [pets]({{{{< baseurl >}}}}{url}) are legion"
    content_to_clean = WebsiteContentFactory.build(markdown=markdown, website=website)
    # linkable content whose dirpath/filename should match the baseurl link
    linkable = WebsiteContentFactory.build(
        website=website,
        dirpath=f"content{content_relative_dirpath}",
        filename=filename,
        text_id="content-uuid",
    )
    get_markdown_cleaner([linkable]).update_website_content(content_to_clean)
    expected_markdown = 'my {{% resource_link content-uuid "pets" %}} are legion'
    assert content_to_clean.markdown == expected_markdown
def test_mail_transcripts_complete_notification(settings, mocker):
    """mail_transcripts_complete_notification should send correct email to correct users"""
    website = WebsiteFactory.create()
    users = UserFactory.create_batch(4)
    admins, editors = users[:2], users[2:]
    for user in admins:
        user.groups.add(website.admin_group)
    for user in editors:
        user.groups.add(website.editor_group)
    mock_get_message_sender = mocker.patch("videos.tasks.get_message_sender")
    sender = mock_get_message_sender.return_value.__enter__.return_value
    mail_transcripts_complete_notification(website)
    mock_get_message_sender.assert_called_once_with(VideoTranscriptingCompleteMessage)
    # one message per group member plus one more (e.g. the site owner)
    assert sender.build_and_send_message.call_count == len(users) + 1
    expected_context = {
        "site": {
            "title": website.title,
            "url": urljoin(settings.SITE_BASE_URL, f"/sites/{website.name}"),
        },
    }
    for user in users:
        sender.build_and_send_message.assert_any_call(user, expected_context)
def test_website_detail_serializer(settings, has_starter, drive_folder, drive_credentials):
    """WebsiteDetailSerializer should serialize a Website object with the correct fields, including config"""
    settings.DRIVE_SERVICE_ACCOUNT_CREDS = drive_credentials
    settings.DRIVE_SHARED_ID = "abc123"
    settings.DRIVE_UPLOADS_PARENT_FOLDER_ID = None
    website = WebsiteFactory.build(
        gdrive_folder=drive_folder,
        starter=(WebsiteStarterFactory.create() if has_starter else None),
    )
    data = WebsiteDetailSerializer(instance=website).data
    for field in (
        "name",
        "title",
        "metadata",
        "source",
        "has_unpublished_live",
        "has_unpublished_draft",
    ):
        assert data[field] == getattr(website, field)
    if has_starter:
        expected_starter = WebsiteStarterDetailSerializer(instance=website.starter).data
    else:
        expected_starter = None
    assert data["starter"] == expected_starter
    assert parse_date(data["publish_date"]) == website.publish_date
    assert parse_date(data["draft_publish_date"]) == website.draft_publish_date
    assert data["live_url"] == website.get_url("live")
    assert data["draft_url"] == website.get_url("draft")
    # gdrive url is only present when both credentials and folder exist
    if drive_credentials is not None and drive_folder is not None:
        expected_gdrive_url = (
            f"https://drive.google.com/drive/folders/abc123/{website.gdrive_folder}"
        )
    else:
        expected_gdrive_url = None
    assert data["gdrive_url"] == expected_gdrive_url
def permission_groups():
    """Set up groups, users and websites for permission testing"""
    users = UserFactory.create_batch(5)
    global_admin, global_author, site_owner, site_admin, site_editor = users
    websites = WebsiteFactory.create_batch(2, owner=site_owner)
    first_site = websites[0]
    global_admin.groups.add(Group.objects.get(name=constants.GLOBAL_ADMIN))
    global_author.groups.add(Group.objects.get(name=constants.GLOBAL_AUTHOR))
    site_admin.groups.add(first_site.admin_group)
    site_editor.groups.add(first_site.editor_group)
    owner_content = WebsiteContentFactory.create(
        website=first_site, owner=first_site.owner
    )
    editor_content = WebsiteContentFactory.create(
        website=first_site, owner=site_editor
    )
    yield SimpleNamespace(
        global_admin=global_admin,
        global_author=global_author,
        site_admin=site_admin,
        site_editor=site_editor,
        websites=websites,
        owner_content=owner_content,
        editor_content=editor_content,
    )
def test_upsert_pipeline_public_vs_private(settings, mocker, mock_auth, is_private_repo):
    """Pipeline config should have expected course-markdown git url and private git key setting if applicable"""
    settings.CONCOURSE_IS_PRIVATE_REPO = is_private_repo
    settings.GIT_DOMAIN = "github.test.edu"
    settings.GIT_ORGANIZATION = "testorg"
    settings.OCW_STUDIO_DRAFT_URL = "https://draft.test.edu"
    settings.OCW_STUDIO_LIVE_URL = "https://live.test.edu"
    mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
        return_value=(None, {"X-Concourse-Config-Version": 1}),
    )
    mock_put_headers = mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.put_with_headers"
    )
    website = WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(
            source=STARTER_SOURCE_GITHUB, path="https://github.com/org/repo/site"
        )
    )
    private_key_str = "((git-private-key))"
    # private repos use the ssh form; public repos use https
    repo_path = f"{settings.GIT_ORGANIZATION}/{website.short_id}.git"
    if is_private_repo:
        repo_url_str = f"git@{settings.GIT_DOMAIN}:{repo_path}"
    else:
        repo_url_str = f"https://{settings.GIT_DOMAIN}/{repo_path}"
    SitePipeline(website).upsert_pipeline()
    _, kwargs = mock_put_headers.call_args_list[0]
    config_str = json.dumps(kwargs)
    assert repo_url_str in config_str
    assert (private_key_str in config_str) is is_private_repo
def test_incomplete_content_warnings(
    mocker, has_missing_ids, has_missing_captions, has_truncatable_text
):
    """incomplete_content_warnings should return expected warning messages"""
    website = WebsiteFactory.create()
    videos = WebsiteContentFactory.create_batch(3, website=website)
    missing_id_videos = videos[0:2] if has_missing_ids else []
    missing_caption_videos = videos[1:3] if has_missing_captions else []
    truncatable_videos = [videos[2]] if has_truncatable_text else []
    mocker.patch(
        "websites.api.videos_with_truncatable_text",
        return_value=truncatable_videos,
    )
    mocker.patch(
        "websites.api.videos_with_unassigned_youtube_ids",
        return_value=missing_id_videos,
    )
    mocker.patch(
        "websites.api.videos_missing_captions",
        return_value=missing_caption_videos,
    )
    warnings = incomplete_content_warnings(website)
    expected_count = 0
    if has_missing_ids:
        expected_count += 1
        for video in missing_id_videos:
            assert video.title in warnings[0]
    if has_missing_captions:
        expected_count += 1
        caption_warning_index = 1 if has_missing_ids else 0
        for video in missing_caption_videos:
            assert video.title in warnings[caption_warning_index]
    if has_truncatable_text:
        expected_count += 1
        assert len(warnings) == expected_count
        assert videos[2].title in warnings[expected_count - 1]
    if not has_missing_ids and not has_missing_captions and not has_truncatable_text:
        assert warnings == []
def get_updated_content_and_parent(update_field):
    """Run update_content_from_s3_data with test data and return content, parent"""
    site = WebsiteFactory.build()
    content = WebsiteContentFactory.build(
        markdown="original markdown",
        metadata={"title": "original title"},
        website=site,
    )
    content.save = Mock()
    # prepare the parent, but do not set content.parent_id.
    # that's one of the things we'll test
    parent = WebsiteContentFactory.build(id=123)
    s3_content_data = {
        "markdown": "s3 markdown",
        "metadata": {
            "title": "s3 title",
            "author": "s3 author",
            "parent_uid": "s3_parent_uid",
        },
        "parent": parent,
    }
    with patch("websites.models.WebsiteContent.objects") as mock_objects:
        # make the lookup inside update_content_from_s3_data return our content
        mock_objects.filter.return_value.first.return_value = content
        update_content_from_s3_data(
            content.website, content.text_id, s3_content_data, update_field
        )
    return content, parent
def test_data_file_deserialize(serializer_cls, file_content):
    """
    JsonFileSerializer and YamlFileSerializer.deserialize should create the
    expected content object from some data file contents
    """
    website = WebsiteFactory.create()
    site_config = SiteConfig(website.starter.config)
    # pick the first config item that is backed by a data file
    file_config_item = next(
        item for item in site_config.iter_items() if "file" in item.item
    )
    website_content = serializer_cls(site_config).deserialize(
        website=website,
        filepath=file_config_item.item["file"],
        file_contents=file_content,
    )
    assert website_content.title == "Content Title"
    assert website_content.type == file_config_item.item["name"]
    assert website_content.text_id == file_config_item.item["name"]
    assert website_content.is_page_content is False
    assert website_content.metadata == {
        "tags": ["Design"],
        "description": "**This** is the description",
    }
def test_content_create_page_added_context_with_slug(drf_client, global_admin_user):
    """
    POSTing to the WebsiteContent list view without a filename should add a
    generated filename based on the slug field
    """
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    website.starter.config["collections"][0]["slug"] = "text_id"
    website.starter.save()
    resp = drf_client.post(
        reverse(
            "websites_content_api-list",
            kwargs={"parent_lookup_website": website.name},
        ),
        data={
            "title": "My Title",
            "markdown": "some markdown",
            "type": "blog",
        },
    )
    assert resp.status_code == 201
    created = website.websitecontent_set.order_by("-created_on").first()
    assert created.website == website
    # slug is configured as text_id, so the filename should equal it
    assert created.filename == created.text_id
    # "folder" path for the config item with type="blog" in basic-site-config.yml
    assert created.dirpath == "content/blog"
    assert created.is_page_content is True
def test_publish_website_batch(mocker, version, prepublish, trigger):
    """publish_website_batch should make the expected function calls"""
    mock_import_string = mocker.patch("content_sync.tasks.import_string")
    mock_publish_website = mocker.patch("content_sync.api.publish_website")
    mock_throttle = mocker.patch("content_sync.tasks.api.throttle_git_backend_calls")
    website_names = sorted(site.name for site in WebsiteFactory.create_batch(3))
    # a pipeline api is only created when the pipeline is triggered
    if trigger:
        expected_api = mock_import_string.return_value.get_api.return_value
    else:
        expected_api = None
    tasks.publish_website_batch(
        website_names, version, prepublish=prepublish, trigger_pipeline=trigger
    )
    for name in website_names:
        mock_publish_website.assert_any_call(
            name,
            version,
            pipeline_api=expected_api,
            prepublish=prepublish,
            trigger_pipeline=trigger,
        )
    assert mock_throttle.call_count == len(website_names)
    # one extra import_string call for the pipeline module when triggering
    expected_imports = len(website_names) + 1 if trigger else len(website_names)
    assert mock_import_string.call_count == expected_imports
def test_upsert_website_pipeline_batch(mocker, settings, create_backend, unpause, check_limit):
    """upsert_website_pipeline_batch should make the expected function calls"""
    settings.GITHUB_RATE_LIMIT_CHECK = check_limit
    mock_get_backend = mocker.patch("content_sync.tasks.api.get_sync_backend")
    mock_get_pipeline = mocker.patch("content_sync.tasks.api.get_sync_pipeline")
    mock_throttle = mocker.patch("content_sync.tasks.api.throttle_git_backend_calls")
    websites = WebsiteFactory.create_batch(2)
    names = sorted(site.name for site in websites)
    tasks.upsert_website_pipeline_batch(
        names, create_backend=create_backend, unpause=unpause
    )
    # the first site initializes the api; subsequent sites reuse it
    mock_get_pipeline.assert_any_call(websites[0], api=None)
    mock_get_pipeline.assert_any_call(websites[1], api=mocker.ANY)
    if create_backend:
        backend = mock_get_backend.return_value
        for site in websites:
            mock_get_backend.assert_any_call(site)
            mock_throttle.assert_any_call(backend)
        assert backend.create_website_in_backend.call_count == 2
        assert backend.sync_all_content_to_backend.call_count == 2
    else:
        mock_get_backend.assert_not_called()
    pipeline = mock_get_pipeline.return_value
    assert pipeline.upsert_pipeline.call_count == 2
    if unpause:
        pipeline.unpause_pipeline.assert_any_call(VERSION_DRAFT)
        pipeline.unpause_pipeline.assert_any_call(VERSION_LIVE)
    else:
        pipeline.unpause_pipeline.assert_not_called()
def test_websites_content_list_multiple_type(drf_client, global_admin_user):
    """The list view of WebsiteContent should be able to filter by multiple type values"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    WebsiteContentFactory.create_batch(
        3,
        website=website,
        type=factory.Iterator(["page", "resource", "other"]),
    )
    listing_url = reverse(
        "websites_content_api-list",
        kwargs={"parent_lookup_website": website.name},
    )
    resp = drf_client.get(listing_url, {"type[0]": "page", "type[1]": "resource"})
    # only the two requested types should come back; "other" is filtered out
    assert resp.data["count"] == 2
    assert {item["type"] for item in resp.data["results"]} == {"page", "resource"}
def test_websites_content_publish_sorting(drf_client, global_admin_user, published):
    """should be able to filter to just published or not"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create(published=True)
    # Previously the locals below were named `published`, shadowing the
    # parametrized boolean, so the request always sent a truthy list and the
    # published=False case was never exercised (hence the old
    # pylint unused-argument disable). Renamed so the parameter is used.
    unpublished_content = WebsiteContentFactory.create_batch(
        3,
        website=website,
        # they were created after the publish date
        created_on=website.publish_date + datetime.timedelta(days=2),
    )
    published_content = WebsiteContentFactory.create_batch(
        3,
        website=website,
    )
    for content in published_content:
        content.created_on = website.publish_date - datetime.timedelta(days=2)
        content.save()
    api_url = reverse(
        "websites_content_api-list",
        kwargs={
            "parent_lookup_website": website.name,
        },
    )
    resp = drf_client.get(api_url, {"published": published})
    expected_content = published_content if published else unpublished_content
    expected_ids = sorted(c.text_id for c in expected_content)
    assert resp.data["count"] == 3
    assert expected_ids == sorted(c["text_id"] for c in resp.data["results"])
def test_videos_with_truncatable_text(mocker, is_ocw):
    """Videos with titles or descriptions that are too long should be returned"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    long_title = " ".join(["TooLongTitle" for _ in range(10)])
    long_description = " ".join(["TooLongDescription" for _ in range(500)])
    resources = [
        WebsiteContentFactory.create(
            website=website,
            title=title,
            metadata={
                "description": description,
                "resourcetype": RESOURCE_TYPE_VIDEO,
                "video_files": {"video_captions_file": "abc123"},
            },
        )
        for title, description in (
            (long_title, "desc"),       # title too long
            ("title", long_description),  # description too long
            ("title", "desc"),          # both within limits
        )
    ]
    truncatable_content = videos_with_truncatable_text(website)
    # sanity check: the long description really exceeds the limit
    assert len(resources[1].metadata["description"]) > 5000
    if is_ocw:
        assert len(truncatable_content) == 2
        for resource in resources[0:2]:
            assert resource in truncatable_content
    else:
        assert truncatable_content == []
def test_websites_content_create_with_textid(drf_client, global_admin_user):
    """If a text_id is added when POSTing to the WebsiteContent, we should use that instead of creating a uuid"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    payload = {
        "type": "sitemetadata",
        "metadata": {"course_title": "a title"},
        "text_id": "sitemetadata",
    }
    listing_url = reverse(
        "websites_content_api-list",
        kwargs={"parent_lookup_website": website.name},
    )
    resp = drf_client.post(listing_url, data=payload)
    assert resp.status_code == 201
    created = website.websitecontent_set.get()
    assert created.type == payload["type"]
    assert resp.data["text_id"] == str(created.text_id)
    # the provided text_id was used verbatim instead of a generated uuid
    assert created.text_id == "sitemetadata"
def test_websitecontent_autogen_filename_unique(
    mocker, filename_base, existing_filenames, exp_result_filename, exclude_content
):
    """
    get_valid_new_filename should return a filename that obeys uniqueness
    constraints, adding a suffix and removing characters from the end of the
    string as necessary.
    """
    # Set a lower limit for max filename length to test that filenames are truncated appropriately
    mocker.patch("websites.api.CONTENT_FILENAME_MAX_LEN", 14)
    dirpath = "path/to"
    website = WebsiteFactory.create()
    contents = WebsiteContentFactory.create_batch(
        len(existing_filenames),
        website=website,
        type="page",
        dirpath=dirpath,
        filename=factory.Iterator(existing_filenames),
    )
    # optionally exclude the first existing content from the uniqueness check
    if exclude_content and contents:
        exclude_text_id = contents[0].text_id
    else:
        exclude_text_id = None
    result = get_valid_new_filename(
        website_pk=website.pk,
        dirpath=dirpath,
        filename_base=filename_base,
        exclude_text_id=exclude_text_id,
    )
    expected = exp_result_filename if not exclude_content else filename_base
    assert result == expected
def test_website_status_serializer(mocker, settings, drive_folder, warnings):
    """WebsiteStatusSerializer should serialize a Website object with the correct status fields"""
    mocker.patch(
        "websites.serializers.incomplete_content_warnings", return_value=warnings
    )
    settings.DRIVE_UPLOADS_PARENT_FOLDER_ID = "dfg789"
    settings.DRIVE_SERVICE_ACCOUNT_CREDS = {"key": "value"}
    settings.DRIVE_SHARED_ID = "abc123"
    status_fields = {
        "publish_date": "2021-11-01T00:00:00Z",
        "draft_publish_date": "2021-11-02T00:00:00Z",
        "has_unpublished_live": True,
        "has_unpublished_draft": False,
        "live_publish_status": "succeeded",
        "live_publish_status_updated_on": "2021-11-03T00:00:00Z",
        "draft_publish_status": "errored",
        "draft_publish_status_updated_on": "2021-11-04T00:00:00Z",
        "sync_status": "Complete",
        "sync_errors": ["error1"],
        "synced_on": "2021-11-05T00:00:00Z",
    }
    website = WebsiteFactory.build(gdrive_folder=drive_folder, **status_fields)
    data = WebsiteStatusSerializer(instance=website).data
    # gdrive url is only built when the site has a gdrive folder
    if drive_folder is not None:
        expected_gdrive_url = (
            f"https://drive.google.com/drive/folders/{settings.DRIVE_UPLOADS_PARENT_FOLDER_ID}/{website.gdrive_folder}"
        )
    else:
        expected_gdrive_url = None
    assert data["gdrive_url"] == expected_gdrive_url
    assert sorted(data["content_warnings"]) == sorted(warnings)
    for key, value in status_fields.items():
        assert data.get(key) == value